Diffstat (limited to 'vendor/golang.org/x/tools')
-rw-r--r--  vendor/golang.org/x/tools/LICENSE | 27
-rw-r--r--  vendor/golang.org/x/tools/PATENTS | 22
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/enclosing.go | 627
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/enclosing_test.go | 195
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/imports.go | 470
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/imports_test.go | 1818
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/rewrite.go | 477
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/rewrite_test.go | 248
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/util.go | 14
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/allpackages.go | 198
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/allpackages_test.go | 78
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/fakecontext.go | 108
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/overlay.go | 103
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/overlay_test.go | 70
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/tags.go | 75
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/tags_test.go | 28
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/util.go | 212
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/util_test.go | 72
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/util_windows_test.go | 48
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/callgraph.go | 129
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/cha/cha.go | 139
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/cha/cha_test.go | 111
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/cha/testdata/func.go | 23
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/cha/testdata/iface.go | 65
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/cha/testdata/issue23925.go | 38
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/cha/testdata/recv.go | 37
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/util.go | 181
-rw-r--r--  vendor/golang.org/x/tools/go/loader/cgo.go | 207
-rw-r--r--  vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go | 39
-rw-r--r--  vendor/golang.org/x/tools/go/loader/doc.go | 205
-rw-r--r--  vendor/golang.org/x/tools/go/loader/example_test.go | 179
-rw-r--r--  vendor/golang.org/x/tools/go/loader/loader.go | 1077
-rw-r--r--  vendor/golang.org/x/tools/go/loader/loader_test.go | 816
-rw-r--r--  vendor/golang.org/x/tools/go/loader/stdlib_test.go | 201
-rw-r--r--  vendor/golang.org/x/tools/go/loader/testdata/a.go | 1
-rw-r--r--  vendor/golang.org/x/tools/go/loader/testdata/b.go | 1
-rw-r--r--  vendor/golang.org/x/tools/go/loader/testdata/badpkgdecl.go | 1
-rw-r--r--  vendor/golang.org/x/tools/go/loader/util.go | 124
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/blockopt.go | 187
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/builder.go | 2379
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/builder_test.go | 500
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/const.go | 169
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/create.go | 263
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/doc.go | 123
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/dom.go | 341
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/emit.go | 468
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/example_test.go | 138
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/func.go | 689
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/identical.go | 7
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/identical_17.go | 7
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/identical_test.go | 9
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/lift.go | 653
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/lvalue.go | 120
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/methods.go | 239
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/mode.go | 100
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/print.go | 431
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/sanity.go | 521
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/source.go | 293
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/source_test.go | 397
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssa.go | 1696
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssautil/load.go | 95
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssautil/load_test.go | 64
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssautil/switch.go | 234
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssautil/switch_test.go | 74
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssautil/testdata/switches.go | 357
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssautil/visit.go | 79
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/stdlib_test.go | 151
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/testdata/objlookup.go | 160
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/testdata/structconv.go | 24
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/testdata/valueforexpr.go | 152
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/testmain.go | 267
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/testmain_test.go | 124
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/util.go | 119
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/wrappers.go | 294
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/example_test.go | 67
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/imports.go | 31
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/imports_test.go | 80
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/map.go | 313
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/map_test.go | 174
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go | 72
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/ui.go | 52
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/ui_test.go | 61
82 files changed, 21238 insertions, 0 deletions
diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE
new file mode 100644
index 0000000..6a66aea
--- /dev/null
+++ b/vendor/golang.org/x/tools/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS
new file mode 100644
index 0000000..7330990
--- /dev/null
+++ b/vendor/golang.org/x/tools/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
new file mode 100644
index 0000000..6b7052b
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
@@ -0,0 +1,627 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+// This file defines utilities for working with source positions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "sort"
+)
+
+// PathEnclosingInterval returns the node that encloses the source
+// interval [start, end), and all its ancestors up to the AST root.
+//
+// The definition of "enclosing" used by this function considers
+// additional whitespace abutting a node to be enclosed by it.
+// In this example:
+//
+// z := x + y // add them
+// <-A->
+// <----B----->
+//
+// the ast.BinaryExpr(+) node is considered to enclose interval B
+// even though its [Pos()..End()) is actually only interval A.
+// This behaviour makes user interfaces more tolerant of imperfect
+// input.
+//
+// This function treats tokens as nodes, though they are not included
+// in the result. e.g. PathEnclosingInterval("+") returns the
+// enclosing ast.BinaryExpr("x + y").
+//
+// If start==end, the 1-char interval following start is used instead.
+//
+// The 'exact' result is true if the interval contains only path[0]
+// and perhaps some adjacent whitespace. It is false if the interval
+// overlaps multiple children of path[0], or if it contains only
+// interior whitespace of path[0].
+// In this example:
+//
+// z := x + y // add them
+// <--C--> <---E-->
+// ^
+// D
+//
+// intervals C, D and E are inexact. C is contained by the
+// z-assignment statement, because it spans three of its children (:=,
+// x, +). So too is the 1-char interval D, because it contains only
+// interior whitespace of the assignment. E is considered interior
+// whitespace of the BlockStmt containing the assignment.
+//
+// Precondition: [start, end) both lie within the same file as root.
+// TODO(adonovan): return (nil, false) in this case and remove precond.
+// Requires FileSet; see loader.tokenFileContainsPos.
+//
+// Postcondition: path is never nil; it always contains at least 'root'.
+//
+func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
+ // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
+
+ // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
+ var visit func(node ast.Node) bool
+ visit = func(node ast.Node) bool {
+ path = append(path, node)
+
+ nodePos := node.Pos()
+ nodeEnd := node.End()
+
+ // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
+
+ // Intersect [start, end) with interval of node.
+ if start < nodePos {
+ start = nodePos
+ }
+ if end > nodeEnd {
+ end = nodeEnd
+ }
+
+ // Find sole child that contains [start, end).
+ children := childrenOf(node)
+ l := len(children)
+ for i, child := range children {
+ // [childPos, childEnd) is unaugmented interval of child.
+ childPos := child.Pos()
+ childEnd := child.End()
+
+ // [augPos, augEnd) is whitespace-augmented interval of child.
+ augPos := childPos
+ augEnd := childEnd
+ if i > 0 {
+ augPos = children[i-1].End() // start of preceding whitespace
+ }
+ if i < l-1 {
+ nextChildPos := children[i+1].Pos()
+ // Does [start, end) lie between child and next child?
+ if start >= augEnd && end <= nextChildPos {
+ return false // inexact match
+ }
+ augEnd = nextChildPos // end of following whitespace
+ }
+
+ // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
+ // i, augPos, augEnd, start, end) // debugging
+
+ // Does augmented child strictly contain [start, end)?
+ if augPos <= start && end <= augEnd {
+ _, isToken := child.(tokenNode)
+ return isToken || visit(child)
+ }
+
+ // Does [start, end) overlap multiple children?
+ // i.e. left-augmented child contains start
+ // but LR-augmented child does not contain end.
+ if start < childEnd && end > augEnd {
+ break
+ }
+ }
+
+ // No single child contained [start, end),
+ // so node is the result. Is it exact?
+
+ // (It's tempting to put this condition before the
+ // child loop, but it gives the wrong result in the
+ // case where a node (e.g. ExprStmt) and its sole
+ // child have equal intervals.)
+ if start == nodePos && end == nodeEnd {
+ return true // exact match
+ }
+
+ return false // inexact: overlaps multiple children
+ }
+
+ if start > end {
+ start, end = end, start
+ }
+
+ if start < root.End() && end > root.Pos() {
+ if start == end {
+ end = start + 1 // empty interval => interval of size 1
+ }
+ exact = visit(root)
+
+ // Reverse the path:
+ for i, l := 0, len(path); i < l/2; i++ {
+ path[i], path[l-1-i] = path[l-1-i], path[i]
+ }
+ } else {
+ // Selection lies within whitespace preceding the
+ // first (or following the last) declaration in the file.
+ // The result nonetheless always includes the ast.File.
+ path = append(path, root)
+ }
+
+ return
+}
+
+// tokenNode is a dummy implementation of ast.Node for a single token.
+// It is used transiently by PathEnclosingInterval but never escapes
+// this package.
+//
+type tokenNode struct {
+ pos token.Pos
+ end token.Pos
+}
+
+func (n tokenNode) Pos() token.Pos {
+ return n.pos
+}
+
+func (n tokenNode) End() token.Pos {
+ return n.end
+}
+
+func tok(pos token.Pos, len int) ast.Node {
+ return tokenNode{pos, pos + token.Pos(len)}
+}
+
+// childrenOf returns the direct non-nil children of ast.Node n.
+// It may include fake ast.Node implementations for bare tokens.
+// It is not safe to call (e.g.) ast.Walk on such nodes.
+//
+func childrenOf(n ast.Node) []ast.Node {
+ var children []ast.Node
+
+ // First add nodes for all true subtrees.
+ ast.Inspect(n, func(node ast.Node) bool {
+ if node == n { // push n
+ return true // recur
+ }
+ if node != nil { // push child
+ children = append(children, node)
+ }
+ return false // no recursion
+ })
+
+ // Then add fake Nodes for bare tokens.
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Elt.End(), len("]")))
+
+ case *ast.AssignStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.BasicLit:
+ children = append(children,
+ tok(n.ValuePos, len(n.Value)))
+
+ case *ast.BinaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.BlockStmt:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("}")))
+
+ case *ast.BranchStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.CallExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ if n.Ellipsis != 0 {
+ children = append(children, tok(n.Ellipsis, len("...")))
+ }
+
+ case *ast.CaseClause:
+ if n.List == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.ChanType:
+ switch n.Dir {
+ case ast.RECV:
+ children = append(children, tok(n.Begin, len("<-chan")))
+ case ast.SEND:
+ children = append(children, tok(n.Begin, len("chan<-")))
+ case ast.RECV | ast.SEND:
+ children = append(children, tok(n.Begin, len("chan")))
+ }
+
+ case *ast.CommClause:
+ if n.Comm == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.Comment:
+ // nop
+
+ case *ast.CommentGroup:
+ // nop
+
+ case *ast.CompositeLit:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("}")))
+
+ case *ast.DeclStmt:
+ // nop
+
+ case *ast.DeferStmt:
+ children = append(children,
+ tok(n.Defer, len("defer")))
+
+ case *ast.Ellipsis:
+ children = append(children,
+ tok(n.Ellipsis, len("...")))
+
+ case *ast.EmptyStmt:
+ // nop
+
+ case *ast.ExprStmt:
+ // nop
+
+ case *ast.Field:
+ // TODO(adonovan): Field.{Doc,Comment,Tag}?
+
+ case *ast.FieldList:
+ children = append(children,
+ tok(n.Opening, len("(")),
+ tok(n.Closing, len(")")))
+
+ case *ast.File:
+ // TODO test: Doc
+ children = append(children,
+ tok(n.Package, len("package")))
+
+ case *ast.ForStmt:
+ children = append(children,
+ tok(n.For, len("for")))
+
+ case *ast.FuncDecl:
+ // TODO(adonovan): FuncDecl.Comment?
+
+ // Uniquely, FuncDecl breaks the invariant that
+ // preorder traversal yields tokens in lexical order:
+ // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
+ //
+ // As a workaround, we inline the case for FuncType
+ // here and order things correctly.
+ //
+ children = nil // discard ast.Walk(FuncDecl) info subtrees
+ children = append(children, tok(n.Type.Func, len("func")))
+ if n.Recv != nil {
+ children = append(children, n.Recv)
+ }
+ children = append(children, n.Name)
+ if n.Type.Params != nil {
+ children = append(children, n.Type.Params)
+ }
+ if n.Type.Results != nil {
+ children = append(children, n.Type.Results)
+ }
+ if n.Body != nil {
+ children = append(children, n.Body)
+ }
+
+ case *ast.FuncLit:
+ // nop
+
+ case *ast.FuncType:
+ if n.Func != 0 {
+ children = append(children,
+ tok(n.Func, len("func")))
+ }
+
+ case *ast.GenDecl:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+ if n.Lparen != 0 {
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ }
+
+ case *ast.GoStmt:
+ children = append(children,
+ tok(n.Go, len("go")))
+
+ case *ast.Ident:
+ children = append(children,
+ tok(n.NamePos, len(n.Name)))
+
+ case *ast.IfStmt:
+ children = append(children,
+ tok(n.If, len("if")))
+
+ case *ast.ImportSpec:
+ // TODO(adonovan): ImportSpec.{Doc,EndPos}?
+
+ case *ast.IncDecStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.IndexExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.InterfaceType:
+ children = append(children,
+ tok(n.Interface, len("interface")))
+
+ case *ast.KeyValueExpr:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.LabeledStmt:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.MapType:
+ children = append(children,
+ tok(n.Map, len("map")))
+
+ case *ast.ParenExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.RangeStmt:
+ children = append(children,
+ tok(n.For, len("for")),
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.ReturnStmt:
+ children = append(children,
+ tok(n.Return, len("return")))
+
+ case *ast.SelectStmt:
+ children = append(children,
+ tok(n.Select, len("select")))
+
+ case *ast.SelectorExpr:
+ // nop
+
+ case *ast.SendStmt:
+ children = append(children,
+ tok(n.Arrow, len("<-")))
+
+ case *ast.SliceExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.StarExpr:
+ children = append(children, tok(n.Star, len("*")))
+
+ case *ast.StructType:
+ children = append(children, tok(n.Struct, len("struct")))
+
+ case *ast.SwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.TypeAssertExpr:
+ children = append(children,
+ tok(n.Lparen-1, len(".")),
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.TypeSpec:
+ // TODO(adonovan): TypeSpec.{Doc,Comment}?
+
+ case *ast.TypeSwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.UnaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.ValueSpec:
+ // TODO(adonovan): ValueSpec.{Doc,Comment}?
+
+ case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
+ // nop
+ }
+
+ // TODO(adonovan): opt: merge the logic of ast.Inspect() into
+ // the switch above so we can make interleaved callbacks for
+ // both Nodes and Tokens in the right order and avoid the need
+ // to sort.
+ sort.Sort(byPos(children))
+
+ return children
+}
+
+type byPos []ast.Node
+
+func (sl byPos) Len() int {
+ return len(sl)
+}
+func (sl byPos) Less(i, j int) bool {
+ return sl[i].Pos() < sl[j].Pos()
+}
+func (sl byPos) Swap(i, j int) {
+ sl[i], sl[j] = sl[j], sl[i]
+}
+
+// NodeDescription returns a description of the concrete type of n suitable
+// for a user interface.
+//
+// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
+// StarExpr) we could be much more specific given the path to the AST
+// root. Perhaps we should do that.
+//
+func NodeDescription(n ast.Node) string {
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ return "array type"
+ case *ast.AssignStmt:
+ return "assignment"
+ case *ast.BadDecl:
+ return "bad declaration"
+ case *ast.BadExpr:
+ return "bad expression"
+ case *ast.BadStmt:
+ return "bad statement"
+ case *ast.BasicLit:
+ return "basic literal"
+ case *ast.BinaryExpr:
+ return fmt.Sprintf("binary %s operation", n.Op)
+ case *ast.BlockStmt:
+ return "block"
+ case *ast.BranchStmt:
+ switch n.Tok {
+ case token.BREAK:
+ return "break statement"
+ case token.CONTINUE:
+ return "continue statement"
+ case token.GOTO:
+ return "goto statement"
+ case token.FALLTHROUGH:
+ return "fall-through statement"
+ }
+ case *ast.CallExpr:
+ if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
+ return "function call (or conversion)"
+ }
+ return "function call"
+ case *ast.CaseClause:
+ return "case clause"
+ case *ast.ChanType:
+ return "channel type"
+ case *ast.CommClause:
+ return "communication clause"
+ case *ast.Comment:
+ return "comment"
+ case *ast.CommentGroup:
+ return "comment group"
+ case *ast.CompositeLit:
+ return "composite literal"
+ case *ast.DeclStmt:
+ return NodeDescription(n.Decl) + " statement"
+ case *ast.DeferStmt:
+ return "defer statement"
+ case *ast.Ellipsis:
+ return "ellipsis"
+ case *ast.EmptyStmt:
+ return "empty statement"
+ case *ast.ExprStmt:
+ return "expression statement"
+ case *ast.Field:
+ // Can be any of these:
+ // struct {x, y int} -- struct field(s)
+ // struct {T} -- anon struct field
+ // interface {I} -- interface embedding
+ // interface {f()} -- interface method
+ // func (A) func(B) C -- receiver, param(s), result(s)
+ return "field/method/parameter"
+ case *ast.FieldList:
+ return "field/method/parameter list"
+ case *ast.File:
+ return "source file"
+ case *ast.ForStmt:
+ return "for loop"
+ case *ast.FuncDecl:
+ return "function declaration"
+ case *ast.FuncLit:
+ return "function literal"
+ case *ast.FuncType:
+ return "function type"
+ case *ast.GenDecl:
+ switch n.Tok {
+ case token.IMPORT:
+ return "import declaration"
+ case token.CONST:
+ return "constant declaration"
+ case token.TYPE:
+ return "type declaration"
+ case token.VAR:
+ return "variable declaration"
+ }
+ case *ast.GoStmt:
+ return "go statement"
+ case *ast.Ident:
+ return "identifier"
+ case *ast.IfStmt:
+ return "if statement"
+ case *ast.ImportSpec:
+ return "import specification"
+ case *ast.IncDecStmt:
+ if n.Tok == token.INC {
+ return "increment statement"
+ }
+ return "decrement statement"
+ case *ast.IndexExpr:
+ return "index expression"
+ case *ast.InterfaceType:
+ return "interface type"
+ case *ast.KeyValueExpr:
+ return "key/value association"
+ case *ast.LabeledStmt:
+ return "statement label"
+ case *ast.MapType:
+ return "map type"
+ case *ast.Package:
+ return "package"
+ case *ast.ParenExpr:
+ return "parenthesized " + NodeDescription(n.X)
+ case *ast.RangeStmt:
+ return "range loop"
+ case *ast.ReturnStmt:
+ return "return statement"
+ case *ast.SelectStmt:
+ return "select statement"
+ case *ast.SelectorExpr:
+ return "selector"
+ case *ast.SendStmt:
+ return "channel send"
+ case *ast.SliceExpr:
+ return "slice expression"
+ case *ast.StarExpr:
+ return "*-operation" // load/store expr or pointer type
+ case *ast.StructType:
+ return "struct type"
+ case *ast.SwitchStmt:
+ return "switch statement"
+ case *ast.TypeAssertExpr:
+ return "type assertion"
+ case *ast.TypeSpec:
+ return "type specification"
+ case *ast.TypeSwitchStmt:
+ return "type switch"
+ case *ast.UnaryExpr:
+ return fmt.Sprintf("unary %s operation", n.Op)
+ case *ast.ValueSpec:
+ return "value specification"
+
+ }
+ panic(fmt.Sprintf("unexpected node type: %T", n))
+}
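
The exported helpers in enclosing.go, PathEnclosingInterval and NodeDescription, are normally driven from a parsed file. The following standalone sketch is not part of the vendored diff; the file name, source string, and the "x + y" substring it searches for are illustrative assumptions. It maps byte offsets to token.Pos values and prints a description of every enclosing node, innermost first.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"strings"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	fset := token.NewFileSet()
	src := "package p\n\nfunc add(x, y int) int { return x + y }\n"
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	// Convert the byte offsets of the first "x + y" into token.Pos values.
	off := strings.Index(src, "x + y")
	file := fset.File(f.Pos())
	start, end := file.Pos(off), file.Pos(off+len("x + y"))

	// path[0] is the innermost enclosing node (here the BinaryExpr);
	// the last element is always the *ast.File.
	path, exact := astutil.PathEnclosingInterval(f, start, end)
	for _, n := range path {
		fmt.Println(astutil.NodeDescription(n))
	}
	fmt.Println("exact:", exact)
}

For this input the loop prints "binary + operation", "return statement", "block", "function declaration", "source file", and exact is true because the interval covers the BinaryExpr and nothing else.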
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing_test.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing_test.go
new file mode 100644
index 0000000..107f87c
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing_test.go
@@ -0,0 +1,195 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil_test
+
+// This file defines tests of PathEnclosingInterval.
+
+// TODO(adonovan): exhaustive tests that run over the whole input
+// tree, not just handcrafted examples.
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+// pathToString returns a string containing the concrete types of the
+// nodes in path.
+func pathToString(path []ast.Node) string {
+ var buf bytes.Buffer
+ fmt.Fprint(&buf, "[")
+ for i, n := range path {
+ if i > 0 {
+ fmt.Fprint(&buf, " ")
+ }
+ fmt.Fprint(&buf, strings.TrimPrefix(fmt.Sprintf("%T", n), "*ast."))
+ }
+ fmt.Fprint(&buf, "]")
+ return buf.String()
+}
+
+// findInterval parses input and returns the [start, end) positions of
+// the first occurrence of substr in input. f==nil indicates failure;
+// an error has already been reported in that case.
+//
+func findInterval(t *testing.T, fset *token.FileSet, input, substr string) (f *ast.File, start, end token.Pos) {
+ f, err := parser.ParseFile(fset, "<input>", input, 0)
+ if err != nil {
+ t.Errorf("parse error: %s", err)
+ return
+ }
+
+ i := strings.Index(input, substr)
+ if i < 0 {
+ t.Errorf("%q is not a substring of input", substr)
+ f = nil
+ return
+ }
+
+ filePos := fset.File(f.Package)
+ return f, filePos.Pos(i), filePos.Pos(i + len(substr))
+}
+
+// Common input for following tests.
+const input = `
+// Hello.
+package main
+import "fmt"
+func f() {}
+func main() {
+ z := (x + y) // add them
+ f() // NB: ExprStmt and its CallExpr have same Pos/End
+}
+`
+
+func TestPathEnclosingInterval_Exact(t *testing.T) {
+ // For the exact tests, we check that a substring is mapped to
+ // the canonical string for the node it denotes.
+ tests := []struct {
+ substr string // first occurrence of this string indicates interval
+ node string // complete text of expected containing node
+ }{
+ {"package",
+ input[11 : len(input)-1]},
+ {"\npack",
+ input[11 : len(input)-1]},
+ {"main",
+ "main"},
+ {"import",
+ "import \"fmt\""},
+ {"\"fmt\"",
+ "\"fmt\""},
+ {"\nfunc f() {}\n",
+ "func f() {}"},
+ {"x ",
+ "x"},
+ {" y",
+ "y"},
+ {"z",
+ "z"},
+ {" + ",
+ "x + y"},
+ {" :=",
+ "z := (x + y)"},
+ {"x + y",
+ "x + y"},
+ {"(x + y)",
+ "(x + y)"},
+ {" (x + y) ",
+ "(x + y)"},
+ {" (x + y) // add",
+ "(x + y)"},
+ {"func",
+ "func f() {}"},
+ {"func f() {}",
+ "func f() {}"},
+ {"\nfun",
+ "func f() {}"},
+ {" f",
+ "f"},
+ }
+ for _, test := range tests {
+ f, start, end := findInterval(t, new(token.FileSet), input, test.substr)
+ if f == nil {
+ continue
+ }
+
+ path, exact := astutil.PathEnclosingInterval(f, start, end)
+ if !exact {
+ t.Errorf("PathEnclosingInterval(%q) not exact", test.substr)
+ continue
+ }
+
+ if len(path) == 0 {
+ if test.node != "" {
+ t.Errorf("PathEnclosingInterval(%q).path: got [], want %q",
+ test.substr, test.node)
+ }
+ continue
+ }
+
+ if got := input[path[0].Pos():path[0].End()]; got != test.node {
+ t.Errorf("PathEnclosingInterval(%q): got %q, want %q (path was %s)",
+ test.substr, got, test.node, pathToString(path))
+ continue
+ }
+ }
+}
+
+func TestPathEnclosingInterval_Paths(t *testing.T) {
+ // For these tests, we check only the path of the enclosing
+ // node, but not its complete text because it's often quite
+ // large when !exact.
+ tests := []struct {
+ substr string // first occurrence of this string indicates interval
+ path string // the pathToString(),exact of the expected path
+ }{
+ {"// add",
+ "[BlockStmt FuncDecl File],false"},
+ {"(x + y",
+ "[ParenExpr AssignStmt BlockStmt FuncDecl File],false"},
+ {"x +",
+ "[BinaryExpr ParenExpr AssignStmt BlockStmt FuncDecl File],false"},
+ {"z := (x",
+ "[AssignStmt BlockStmt FuncDecl File],false"},
+ {"func f",
+ "[FuncDecl File],false"},
+ {"func f()",
+ "[FuncDecl File],false"},
+ {" f()",
+ "[FuncDecl File],false"},
+ {"() {}",
+ "[FuncDecl File],false"},
+ {"// Hello",
+ "[File],false"},
+ {" f",
+ "[Ident FuncDecl File],true"},
+ {"func ",
+ "[FuncDecl File],true"},
+ {"mai",
+ "[Ident File],true"},
+ {"f() // NB",
+ "[CallExpr ExprStmt BlockStmt FuncDecl File],true"},
+ }
+ for _, test := range tests {
+ f, start, end := findInterval(t, new(token.FileSet), input, test.substr)
+ if f == nil {
+ continue
+ }
+
+ path, exact := astutil.PathEnclosingInterval(f, start, end)
+ if got := fmt.Sprintf("%s,%v", pathToString(path), exact); got != test.path {
+ t.Errorf("PathEnclosingInterval(%q): got %q, want %q",
+ test.substr, got, test.path)
+ continue
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
new file mode 100644
index 0000000..83f196c
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
@@ -0,0 +1,470 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package astutil contains common utilities for working with the Go AST.
+package astutil // import "golang.org/x/tools/go/ast/astutil"
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strconv"
+ "strings"
+)
+
+// AddImport adds the import path to the file f, if absent.
+func AddImport(fset *token.FileSet, f *ast.File, ipath string) (added bool) {
+ return AddNamedImport(fset, f, "", ipath)
+}
+
+// AddNamedImport adds the import path to the file f, if absent.
+// If name is not empty, it is used to rename the import.
+//
+// For example, calling
+// AddNamedImport(fset, f, "pathpkg", "path")
+// adds
+// import pathpkg "path"
+func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added bool) {
+ if imports(f, ipath) {
+ return false
+ }
+
+ newImport := &ast.ImportSpec{
+ Path: &ast.BasicLit{
+ Kind: token.STRING,
+ Value: strconv.Quote(ipath),
+ },
+ }
+ if name != "" {
+ newImport.Name = &ast.Ident{Name: name}
+ }
+
+ // Find an import decl to add to.
+ // The goal is to find an existing import
+ // whose import path has the longest shared
+ // prefix with ipath.
+ var (
+ bestMatch = -1 // length of longest shared prefix
+ lastImport = -1 // index in f.Decls of the file's final import decl
+ impDecl *ast.GenDecl // import decl containing the best match
+ impIndex = -1 // spec index in impDecl containing the best match
+
+ isThirdPartyPath = isThirdParty(ipath)
+ )
+ for i, decl := range f.Decls {
+ gen, ok := decl.(*ast.GenDecl)
+ if ok && gen.Tok == token.IMPORT {
+ lastImport = i
+ // Do not add to import "C", to avoid disrupting the
+ // association with its doc comment, breaking cgo.
+ if declImports(gen, "C") {
+ continue
+ }
+
+ // Match an empty import decl if that's all that is available.
+ if len(gen.Specs) == 0 && bestMatch == -1 {
+ impDecl = gen
+ }
+
+ // Compute longest shared prefix with imports in this group and find best
+ // matched import spec.
+ // 1. Always prefer import spec with longest shared prefix.
+ // 2. While match length is 0,
+ // - for stdlib package: prefer first import spec.
+ // - for third party package: prefer first third party import spec.
+ // We cannot use last import spec as best match for third party package
+ // because grouped imports are usually placed last by goimports -local
+ // flag.
+ // See issue #19190.
+ seenAnyThirdParty := false
+ for j, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ p := importPath(impspec)
+ n := matchLen(p, ipath)
+ if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
+ bestMatch = n
+ impDecl = gen
+ impIndex = j
+ }
+ seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
+ }
+ }
+ }
+
+ // If no import decl found, add one after the last import.
+ if impDecl == nil {
+ impDecl = &ast.GenDecl{
+ Tok: token.IMPORT,
+ }
+ if lastImport >= 0 {
+ impDecl.TokPos = f.Decls[lastImport].End()
+ } else {
+ // There are no existing imports.
+ // Our new import goes after the package declaration and after
+ // the comment, if any, that starts on the same line as the
+ // package declaration.
+ impDecl.TokPos = f.Package
+
+ file := fset.File(f.Package)
+ pkgLine := file.Line(f.Package)
+ for _, c := range f.Comments {
+ if file.Line(c.Pos()) > pkgLine {
+ break
+ }
+ impDecl.TokPos = c.End()
+ }
+ }
+ f.Decls = append(f.Decls, nil)
+ copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
+ f.Decls[lastImport+1] = impDecl
+ }
+
+ // Insert new import at insertAt.
+ insertAt := 0
+ if impIndex >= 0 {
+ // insert after the found import
+ insertAt = impIndex + 1
+ }
+ impDecl.Specs = append(impDecl.Specs, nil)
+ copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
+ impDecl.Specs[insertAt] = newImport
+ pos := impDecl.Pos()
+ if insertAt > 0 {
+ // If there is a comment after an existing import, preserve the comment
+ // position by adding the new import after the comment.
+ if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
+ pos = spec.Comment.End()
+ } else {
+ // Assign same position as the previous import,
+ // so that the sorter sees it as being in the same block.
+ pos = impDecl.Specs[insertAt-1].Pos()
+ }
+ }
+ if newImport.Name != nil {
+ newImport.Name.NamePos = pos
+ }
+ newImport.Path.ValuePos = pos
+ newImport.EndPos = pos
+
+ // Clean up parens. impDecl contains at least one spec.
+ if len(impDecl.Specs) == 1 {
+ // Remove unneeded parens.
+ impDecl.Lparen = token.NoPos
+ } else if !impDecl.Lparen.IsValid() {
+ // impDecl needs parens added.
+ impDecl.Lparen = impDecl.Specs[0].Pos()
+ }
+
+ f.Imports = append(f.Imports, newImport)
+
+ if len(f.Decls) <= 1 {
+ return true
+ }
+
+ // Merge all the import declarations into the first one.
+ var first *ast.GenDecl
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+ continue
+ }
+ if first == nil {
+ first = gen
+ continue // Don't touch the first one.
+ }
+ // We now know there is more than one package in this import
+ // declaration. Ensure that it ends up parenthesized.
+ first.Lparen = first.Pos()
+ // Move the imports of the other import declaration to the first one.
+ for _, spec := range gen.Specs {
+ spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+ first.Specs = append(first.Specs, spec)
+ }
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ i--
+ }
+
+ return true
+}
+
+func isThirdParty(importPath string) bool {
+ // A third-party package import path usually contains "." (".com", ".org", ...).
+ // This logic is taken from golang.org/x/tools/imports package.
+ return strings.Contains(importPath, ".")
+}
+
+// DeleteImport deletes the import path from the file f, if present.
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
+ return DeleteNamedImport(fset, f, "", path)
+}
+
+// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
+func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
+ var delspecs []*ast.ImportSpec
+ var delcomments []*ast.CommentGroup
+
+ // Find the import nodes that import path, if any.
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT {
+ continue
+ }
+ for j := 0; j < len(gen.Specs); j++ {
+ spec := gen.Specs[j]
+ impspec := spec.(*ast.ImportSpec)
+ if impspec.Name == nil && name != "" {
+ continue
+ }
+ if impspec.Name != nil && impspec.Name.Name != name {
+ continue
+ }
+ if importPath(impspec) != path {
+ continue
+ }
+
+ // We found an import spec that imports path.
+ // Delete it.
+ delspecs = append(delspecs, impspec)
+ deleted = true
+ copy(gen.Specs[j:], gen.Specs[j+1:])
+ gen.Specs = gen.Specs[:len(gen.Specs)-1]
+
+ // If this was the last import spec in this decl,
+ // delete the decl, too.
+ if len(gen.Specs) == 0 {
+ copy(f.Decls[i:], f.Decls[i+1:])
+ f.Decls = f.Decls[:len(f.Decls)-1]
+ i--
+ break
+ } else if len(gen.Specs) == 1 {
+ if impspec.Doc != nil {
+ delcomments = append(delcomments, impspec.Doc)
+ }
+ if impspec.Comment != nil {
+ delcomments = append(delcomments, impspec.Comment)
+ }
+ for _, cg := range f.Comments {
+ // Found comment on the same line as the import spec.
+ if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
+ delcomments = append(delcomments, cg)
+ break
+ }
+ }
+
+ spec := gen.Specs[0].(*ast.ImportSpec)
+
+ // Move the documentation right after the import decl.
+ if spec.Doc != nil {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ }
+ for _, cg := range f.Comments {
+ if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ break
+ }
+ }
+ }
+ if j > 0 {
+ lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
+ lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
+ line := fset.Position(impspec.Path.ValuePos).Line
+
+ // We deleted an entry but now there may be
+ // a blank line-sized hole where the import was.
+ if line-lastLine > 1 {
+ // There was a blank line immediately preceding the deleted import,
+ // so there's no need to close the hole.
+ // Do nothing.
+ } else if line != fset.File(gen.Rparen).LineCount() {
+ // There was no blank line. Close the hole.
+ fset.File(gen.Rparen).MergeLine(line)
+ }
+ }
+ j--
+ }
+ }
+
+ // Delete imports from f.Imports.
+ for i := 0; i < len(f.Imports); i++ {
+ imp := f.Imports[i]
+ for j, del := range delspecs {
+ if imp == del {
+ copy(f.Imports[i:], f.Imports[i+1:])
+ f.Imports = f.Imports[:len(f.Imports)-1]
+ copy(delspecs[j:], delspecs[j+1:])
+ delspecs = delspecs[:len(delspecs)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ // Delete comments from f.Comments.
+ for i := 0; i < len(f.Comments); i++ {
+ cg := f.Comments[i]
+ for j, del := range delcomments {
+ if cg == del {
+ copy(f.Comments[i:], f.Comments[i+1:])
+ f.Comments = f.Comments[:len(f.Comments)-1]
+ copy(delcomments[j:], delcomments[j+1:])
+ delcomments = delcomments[:len(delcomments)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ if len(delspecs) > 0 {
+ panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
+ }
+
+ return
+}
+
+// RewriteImport rewrites any import of path oldPath to path newPath.
+func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
+ for _, imp := range f.Imports {
+ if importPath(imp) == oldPath {
+ rewrote = true
+ // record old End, because the default is to compute
+ // it using the length of imp.Path.Value.
+ imp.EndPos = imp.End()
+ imp.Path.Value = strconv.Quote(newPath)
+ }
+ }
+ return
+}
+
+// UsesImport reports whether a given import is used.
+func UsesImport(f *ast.File, path string) (used bool) {
+ spec := importSpec(f, path)
+ if spec == nil {
+ return
+ }
+
+ name := spec.Name.String()
+ switch name {
+ case "<nil>":
+ // If the package name is not explicitly specified,
+ // make an educated guess. This is not guaranteed to be correct.
+ lastSlash := strings.LastIndex(path, "/")
+ if lastSlash == -1 {
+ name = path
+ } else {
+ name = path[lastSlash+1:]
+ }
+ case "_", ".":
+ // Not sure if this import is used - err on the side of caution.
+ return true
+ }
+
+ ast.Walk(visitFn(func(n ast.Node) {
+ sel, ok := n.(*ast.SelectorExpr)
+ if ok && isTopName(sel.X, name) {
+ used = true
+ }
+ }), f)
+
+ return
+}
+
+type visitFn func(node ast.Node)
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ fn(node)
+ return fn
+}
+
+// imports returns true if f imports path.
+func imports(f *ast.File, path string) bool {
+ return importSpec(f, path) != nil
+}
+
+// importSpec returns the import spec if f imports path,
+// or nil otherwise.
+func importSpec(f *ast.File, path string) *ast.ImportSpec {
+ for _, s := range f.Imports {
+ if importPath(s) == path {
+ return s
+ }
+ }
+ return nil
+}
+
+// importPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func importPath(s *ast.ImportSpec) string {
+ t, err := strconv.Unquote(s.Path.Value)
+ if err == nil {
+ return t
+ }
+ return ""
+}
+
+// declImports reports whether gen contains an import of path.
+func declImports(gen *ast.GenDecl, path string) bool {
+ if gen.Tok != token.IMPORT {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ if importPath(impspec) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// matchLen returns the length of the longest path segment prefix shared by x and y.
+func matchLen(x, y string) int {
+ n := 0
+ for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
+ if x[i] == '/' {
+ n++
+ }
+ }
+ return n
+}
+
+// isTopName returns true if n is a top-level unresolved identifier with the given name.
+func isTopName(n ast.Expr, name string) bool {
+ id, ok := n.(*ast.Ident)
+ return ok && id.Name == name && id.Obj == nil
+}
+
+// Imports returns the file imports grouped by paragraph.
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
+ var groups [][]*ast.ImportSpec
+
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok || genDecl.Tok != token.IMPORT {
+ break
+ }
+
+ group := []*ast.ImportSpec{}
+
+ var lastLine int
+ for _, spec := range genDecl.Specs {
+ importSpec := spec.(*ast.ImportSpec)
+ pos := importSpec.Path.ValuePos
+ line := fset.Position(pos).Line
+ if lastLine > 0 && pos > 0 && line-lastLine > 1 {
+ groups = append(groups, group)
+ group = []*ast.ImportSpec{}
+ }
+ group = append(group, importSpec)
+ lastLine = line
+ }
+ groups = append(groups, group)
+ }
+
+ return groups
+}
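
Before the test file below, here is a small usage sketch of the import-manipulation helpers defined above (AddNamedImport, AddImport, UsesImport, DeleteImport). It is not part of the vendored code; the source string and the package names being added are illustrative assumptions. All of these helpers mutate the AST in place, so the result is printed with go/format.

package main

import (
	"go/format"
	"go/parser"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	fset := token.NewFileSet()
	src := "package main\n\nimport \"os\"\n\nfunc main() { os.Exit(0) }\n"
	f, err := parser.ParseFile(fset, "main.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Both calls are no-ops if the path is already imported.
	astutil.AddNamedImport(fset, f, "fmtpkg", "fmt")
	astutil.AddImport(fset, f, "strings")

	// "strings" is never referenced in src, so UsesImport reports false
	// and the import can be removed again.
	if !astutil.UsesImport(f, "strings") {
		astutil.DeleteImport(fset, f, "strings")
	}

	if err := format.Node(os.Stdout, fset, f); err != nil {
		panic(err)
	}
}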
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports_test.go b/vendor/golang.org/x/tools/go/ast/astutil/imports_test.go
new file mode 100644
index 0000000..8bc3480
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports_test.go
@@ -0,0 +1,1818 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "bytes"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "reflect"
+ "strconv"
+ "testing"
+)
+
+var fset = token.NewFileSet()
+
+func parse(t *testing.T, name, in string) *ast.File {
+ file, err := parser.ParseFile(fset, name, in, parser.ParseComments)
+ if err != nil {
+ t.Fatalf("%s parse: %v", name, err)
+ }
+ return file
+}
+
+func print(t *testing.T, name string, f *ast.File) string {
+ var buf bytes.Buffer
+ if err := format.Node(&buf, fset, f); err != nil {
+ t.Fatalf("%s gofmt: %v", name, err)
+ }
+ return string(buf.Bytes())
+}
+
+type test struct {
+ name string
+ renamedPkg string
+ pkg string
+ in string
+ out string
+ broken bool // known broken
+}
+
+var addTests = []test{
+ {
+ name: "leave os alone",
+ pkg: "os",
+ in: `package main
+
+import (
+ "os"
+)
+`,
+ out: `package main
+
+import (
+ "os"
+)
+`,
+ },
+ {
+ name: "import.1",
+ pkg: "os",
+ in: `package main
+`,
+ out: `package main
+
+import "os"
+`,
+ },
+ {
+ name: "import.2",
+ pkg: "os",
+ in: `package main
+
+// Comment
+import "C"
+`,
+ out: `package main
+
+// Comment
+import "C"
+import "os"
+`,
+ },
+ {
+ name: "import.3",
+ pkg: "os",
+ in: `package main
+
+// Comment
+import "C"
+
+import (
+ "io"
+ "utf8"
+)
+`,
+ out: `package main
+
+// Comment
+import "C"
+
+import (
+ "io"
+ "os"
+ "utf8"
+)
+`,
+ },
+ {
+ name: "import.17",
+ pkg: "x/y/z",
+ in: `package main
+
+// Comment
+import "C"
+
+import (
+ "a"
+ "b"
+
+ "x/w"
+
+ "d/f"
+)
+`,
+ out: `package main
+
+// Comment
+import "C"
+
+import (
+ "a"
+ "b"
+
+ "x/w"
+ "x/y/z"
+
+ "d/f"
+)
+`,
+ },
+ {
+ name: "issue #19190",
+ pkg: "x.org/y/z",
+ in: `package main
+
+// Comment
+import "C"
+
+import (
+ "bytes"
+ "os"
+
+ "d.com/f"
+)
+`,
+ out: `package main
+
+// Comment
+import "C"
+
+import (
+ "bytes"
+ "os"
+
+ "d.com/f"
+ "x.org/y/z"
+)
+`,
+ },
+ {
+ name: "issue #19190 with existing grouped import packages",
+ pkg: "x.org/y/z",
+ in: `package main
+
+// Comment
+import "C"
+
+import (
+ "bytes"
+ "os"
+
+ "c.com/f"
+ "d.com/f"
+
+ "y.com/a"
+ "y.com/b"
+ "y.com/c"
+)
+`,
+ out: `package main
+
+// Comment
+import "C"
+
+import (
+ "bytes"
+ "os"
+
+ "c.com/f"
+ "d.com/f"
+ "x.org/y/z"
+
+ "y.com/a"
+ "y.com/b"
+ "y.com/c"
+)
+`,
+ },
+ {
+ name: "issue #19190 - match score is still respected",
+ pkg: "y.org/c",
+ in: `package main
+
+import (
+ "x.org/a"
+
+ "y.org/b"
+)
+`,
+ out: `package main
+
+import (
+ "x.org/a"
+
+ "y.org/b"
+ "y.org/c"
+)
+`,
+ },
+ {
+ name: "import into singular group",
+ pkg: "bytes",
+ in: `package main
+
+import "os"
+
+`,
+ out: `package main
+
+import (
+ "bytes"
+ "os"
+)
+`,
+ },
+ {
+ name: "import into singular group with comment",
+ pkg: "bytes",
+ in: `package main
+
+import /* why */ /* comment here? */ "os"
+
+`,
+ out: `package main
+
+import /* why */ /* comment here? */ (
+ "bytes"
+ "os"
+)
+`,
+ },
+ {
+ name: "import into group with leading comment",
+ pkg: "strings",
+ in: `package main
+
+import (
+ // comment before bytes
+ "bytes"
+ "os"
+)
+
+`,
+ out: `package main
+
+import (
+ // comment before bytes
+ "bytes"
+ "os"
+ "strings"
+)
+`,
+ },
+ {
+ name: "",
+ renamedPkg: "fmtpkg",
+ pkg: "fmt",
+ in: `package main
+
+import "os"
+
+`,
+ out: `package main
+
+import (
+ fmtpkg "fmt"
+ "os"
+)
+`,
+ },
+ {
+ name: "struct comment",
+ pkg: "time",
+ in: `package main
+
+// This is a comment before a struct.
+type T struct {
+ t time.Time
+}
+`,
+ out: `package main
+
+import "time"
+
+// This is a comment before a struct.
+type T struct {
+ t time.Time
+}
+`,
+ },
+ {
+ name: "issue 8729 import C",
+ pkg: "time",
+ in: `package main
+
+import "C"
+
+// comment
+type T time.Time
+`,
+ out: `package main
+
+import "C"
+import "time"
+
+// comment
+type T time.Time
+`,
+ },
+ {
+ name: "issue 8729 empty import",
+ pkg: "time",
+ in: `package main
+
+import ()
+
+// comment
+type T time.Time
+`,
+ out: `package main
+
+import "time"
+
+// comment
+type T time.Time
+`,
+ },
+ {
+ name: "issue 8729 comment on package line",
+ pkg: "time",
+ in: `package main // comment
+
+type T time.Time
+`,
+ out: `package main // comment
+import "time"
+
+type T time.Time
+`,
+ },
+ {
+ name: "issue 8729 comment after package",
+ pkg: "time",
+ in: `package main
+// comment
+
+type T time.Time
+`,
+ out: `package main
+
+import "time"
+
+// comment
+
+type T time.Time
+`,
+ },
+ {
+ name: "issue 8729 comment before and on package line",
+ pkg: "time",
+ in: `// comment before
+package main // comment on
+
+type T time.Time
+`,
+ out: `// comment before
+package main // comment on
+import "time"
+
+type T time.Time
+`,
+ },
+
+ // Issue 9961: Match prefixes using path segments rather than bytes
+ {
+ name: "issue 9961",
+ pkg: "regexp",
+ in: `package main
+
+import (
+ "flag"
+ "testing"
+
+ "rsc.io/p"
+)
+`,
+ out: `package main
+
+import (
+ "flag"
+ "regexp"
+ "testing"
+
+ "rsc.io/p"
+)
+`,
+ },
+ // Issue 10337: Preserve comment position
+ {
+ name: "issue 10337",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ "bytes" // a
+ "log" // c
+)
+`,
+ out: `package main
+
+import (
+ "bytes" // a
+ "fmt"
+ "log" // c
+)
+`,
+ },
+ {
+ name: "issue 10337 new import at the start",
+ pkg: "bytes",
+ in: `package main
+
+import (
+ "fmt" // b
+ "log" // c
+)
+`,
+ out: `package main
+
+import (
+ "bytes"
+ "fmt" // b
+ "log" // c
+)
+`,
+ },
+ {
+ name: "issue 10337 new import at the end",
+ pkg: "log",
+ in: `package main
+
+import (
+ "bytes" // a
+ "fmt" // b
+)
+`,
+ out: `package main
+
+import (
+ "bytes" // a
+ "fmt" // b
+ "log"
+)
+`,
+ },
+ // Issue 14075: Merge import declarations
+ {
+ name: "issue 14075",
+ pkg: "bufio",
+ in: `package main
+
+import "bytes"
+import "fmt"
+`,
+ out: `package main
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+)
+`,
+ },
+ {
+ name: "issue 14075 update position",
+ pkg: "bufio",
+ in: `package main
+
+import "bytes"
+import (
+ "fmt"
+)
+`,
+ out: `package main
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+)
+`,
+ },
+ {
+ name: `issue 14075 ignore import "C"`,
+ pkg: "bufio",
+ in: `package main
+
+// Comment
+import "C"
+
+import "bytes"
+import "fmt"
+`,
+ out: `package main
+
+// Comment
+import "C"
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+)
+`,
+ },
+ {
+ name: `issue 14075 ignore adjacent import "C"`,
+ pkg: "bufio",
+ in: `package main
+
+// Comment
+import "C"
+import "fmt"
+`,
+ out: `package main
+
+// Comment
+import "C"
+import (
+ "bufio"
+ "fmt"
+)
+`,
+ },
+ {
+ name: `issue 14075 ignore adjacent import "C" (without factored import)`,
+ pkg: "bufio",
+ in: `package main
+
+// Comment
+import "C"
+import "fmt"
+`,
+ out: `package main
+
+// Comment
+import "C"
+import (
+ "bufio"
+ "fmt"
+)
+`,
+ },
+ {
+ name: `issue 14075 ignore single import "C"`,
+ pkg: "bufio",
+ in: `package main
+
+// Comment
+import "C"
+`,
+ out: `package main
+
+// Comment
+import "C"
+import "bufio"
+`,
+ },
+ {
+ name: `issue 17212 several single-import lines with shared prefix ending in a slash`,
+ pkg: "net/http",
+ in: `package main
+
+import "bufio"
+import "net/url"
+`,
+ out: `package main
+
+import (
+ "bufio"
+ "net/http"
+ "net/url"
+)
+`,
+ },
+ {
+ name: `issue 17212 block imports lines with shared prefix ending in a slash`,
+ pkg: "net/http",
+ in: `package main
+
+import (
+ "bufio"
+ "net/url"
+)
+`,
+ out: `package main
+
+import (
+ "bufio"
+ "net/http"
+ "net/url"
+)
+`,
+ },
+ {
+ name: `issue 17213 many single-import lines`,
+ pkg: "fmt",
+ in: `package main
+
+import "bufio"
+import "bytes"
+import "errors"
+`,
+ out: `package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+)
+`,
+ },
+}
+
+func TestAddImport(t *testing.T) {
+ for _, test := range addTests {
+ file := parse(t, test.name, test.in)
+ var before bytes.Buffer
+ ast.Fprint(&before, fset, file, nil)
+ AddNamedImport(fset, file, test.renamedPkg, test.pkg)
+ if got := print(t, test.name, file); got != test.out {
+ if test.broken {
+ t.Logf("%s is known broken:\ngot: %s\nwant: %s", test.name, got, test.out)
+ } else {
+ t.Errorf("%s:\ngot: %s\nwant: %s", test.name, got, test.out)
+ }
+ var after bytes.Buffer
+ ast.Fprint(&after, fset, file, nil)
+
+ t.Logf("AST before:\n%s\nAST after:\n%s\n", before.String(), after.String())
+ }
+ }
+}
+
+func TestDoubleAddImport(t *testing.T) {
+ file := parse(t, "doubleimport", "package main\n")
+ AddImport(fset, file, "os")
+ AddImport(fset, file, "bytes")
+ want := `package main
+
+import (
+ "bytes"
+ "os"
+)
+`
+ if got := print(t, "doubleimport", file); got != want {
+ t.Errorf("got: %s\nwant: %s", got, want)
+ }
+}
+
+func TestDoubleAddNamedImport(t *testing.T) {
+ file := parse(t, "doublenamedimport", "package main\n")
+ AddNamedImport(fset, file, "o", "os")
+ AddNamedImport(fset, file, "i", "io")
+ want := `package main
+
+import (
+ i "io"
+ o "os"
+)
+`
+ if got := print(t, "doublenamedimport", file); got != want {
+ t.Errorf("got: %s\nwant: %s", got, want)
+ }
+}
+
+// Part of issue 8729.
+func TestDoubleAddImportWithDeclComment(t *testing.T) {
+ file := parse(t, "doubleimport", `package main
+
+import (
+)
+
+// comment
+type I int
+`)
+ // The AddImport order here matters.
+ AddImport(fset, file, "golang.org/x/tools/go/ast/astutil")
+ AddImport(fset, file, "os")
+ want := `package main
+
+import (
+ "golang.org/x/tools/go/ast/astutil"
+ "os"
+)
+
+// comment
+type I int
+`
+ if got := print(t, "doubleimport_with_decl_comment", file); got != want {
+ t.Errorf("got: %s\nwant: %s", got, want)
+ }
+}
+
+var deleteTests = []test{
+ {
+ name: "import.4",
+ pkg: "os",
+ in: `package main
+
+import (
+ "os"
+)
+`,
+ out: `package main
+`,
+ },
+ {
+ name: "import.5",
+ pkg: "os",
+ in: `package main
+
+// Comment
+import "C"
+import "os"
+`,
+ out: `package main
+
+// Comment
+import "C"
+`,
+ },
+ {
+ name: "import.6",
+ pkg: "os",
+ in: `package main
+
+// Comment
+import "C"
+
+import (
+ "io"
+ "os"
+ "utf8"
+)
+`,
+ out: `package main
+
+// Comment
+import "C"
+
+import (
+ "io"
+ "utf8"
+)
+`,
+ },
+ {
+ name: "import.7",
+ pkg: "io",
+ in: `package main
+
+import (
+ "io" // a
+ "os" // b
+ "utf8" // c
+)
+`,
+ out: `package main
+
+import (
+ // a
+ "os" // b
+ "utf8" // c
+)
+`,
+ },
+ {
+ name: "import.8",
+ pkg: "os",
+ in: `package main
+
+import (
+ "io" // a
+ "os" // b
+ "utf8" // c
+)
+`,
+ out: `package main
+
+import (
+ "io" // a
+ // b
+ "utf8" // c
+)
+`,
+ },
+ {
+ name: "import.9",
+ pkg: "utf8",
+ in: `package main
+
+import (
+ "io" // a
+ "os" // b
+ "utf8" // c
+)
+`,
+ out: `package main
+
+import (
+ "io" // a
+ "os" // b
+ // c
+)
+`,
+ },
+ {
+ name: "import.10",
+ pkg: "io",
+ in: `package main
+
+import (
+ "io"
+ "os"
+ "utf8"
+)
+`,
+ out: `package main
+
+import (
+ "os"
+ "utf8"
+)
+`,
+ },
+ {
+ name: "import.11",
+ pkg: "os",
+ in: `package main
+
+import (
+ "io"
+ "os"
+ "utf8"
+)
+`,
+ out: `package main
+
+import (
+ "io"
+ "utf8"
+)
+`,
+ },
+ {
+ name: "import.12",
+ pkg: "utf8",
+ in: `package main
+
+import (
+ "io"
+ "os"
+ "utf8"
+)
+`,
+ out: `package main
+
+import (
+ "io"
+ "os"
+)
+`,
+ },
+ {
+ name: "handle.raw.quote.imports",
+ pkg: "os",
+ in: "package main\n\nimport `os`",
+ out: `package main
+`,
+ },
+ {
+ name: "import.13",
+ pkg: "io",
+ in: `package main
+
+import (
+ "fmt"
+
+ "io"
+ "os"
+ "utf8"
+
+ "go/format"
+)
+`,
+ out: `package main
+
+import (
+ "fmt"
+
+ "os"
+ "utf8"
+
+ "go/format"
+)
+`,
+ },
+ {
+ name: "import.14",
+ pkg: "io",
+ in: `package main
+
+import (
+ "fmt" // a
+
+ "io" // b
+ "os" // c
+ "utf8" // d
+
+ "go/format" // e
+)
+`,
+ out: `package main
+
+import (
+ "fmt" // a
+
+ // b
+ "os" // c
+ "utf8" // d
+
+ "go/format" // e
+)
+`,
+ },
+ {
+ name: "import.15",
+ pkg: "double",
+ in: `package main
+
+import (
+ "double"
+ "double"
+)
+`,
+ out: `package main
+`,
+ },
+ {
+ name: "import.16",
+ pkg: "bubble",
+ in: `package main
+
+import (
+ "toil"
+ "bubble"
+ "bubble"
+ "trouble"
+)
+`,
+ out: `package main
+
+import (
+ "toil"
+ "trouble"
+)
+`,
+ },
+ {
+ name: "import.17",
+ pkg: "quad",
+ in: `package main
+
+import (
+ "quad"
+ "quad"
+)
+
+import (
+ "quad"
+ "quad"
+)
+`,
+ out: `package main
+`,
+ },
+ {
+ name: "import.18",
+ renamedPkg: "x",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ "fmt"
+ x "fmt"
+)
+`,
+ out: `package main
+
+import (
+ "fmt"
+)
+`,
+ },
+ {
+ name: "import.18",
+ renamedPkg: "x",
+ pkg: "fmt",
+ in: `package main
+
+import x "fmt"
+import y "fmt"
+`,
+ out: `package main
+
+import y "fmt"
+`,
+ },
+ // Issue #15432, #18051
+ {
+ name: "import.19",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ "fmt"
+
+ // Some comment.
+ "io"
+)`,
+ out: `package main
+
+import (
+ // Some comment.
+ "io"
+)
+`,
+ },
+ {
+ name: "import.20",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ "fmt"
+
+ // Some
+ // comment.
+ "io"
+)`,
+ out: `package main
+
+import (
+ // Some
+ // comment.
+ "io"
+)
+`,
+ },
+ {
+ name: "import.21",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ "fmt"
+
+ /*
+ Some
+ comment.
+ */
+ "io"
+)`,
+ out: `package main
+
+import (
+ /*
+ Some
+ comment.
+ */
+ "io"
+)
+`,
+ },
+ {
+ name: "import.22",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ /* Some */
+ // comment.
+ "io"
+ "fmt"
+)`,
+ out: `package main
+
+import (
+ /* Some */
+ // comment.
+ "io"
+)
+`,
+ },
+ {
+ name: "import.23",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ // comment 1
+ "fmt"
+ // comment 2
+ "io"
+)`,
+ out: `package main
+
+import (
+ // comment 2
+ "io"
+)
+`,
+ },
+ {
+ name: "import.24",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ "fmt" // comment 1
+ "io" // comment 2
+)`,
+ out: `package main
+
+import (
+ "io" // comment 2
+)
+`,
+ },
+ {
+ name: "import.25",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ "fmt"
+ /* comment */ "io"
+)`,
+ out: `package main
+
+import (
+ /* comment */ "io"
+)
+`,
+ },
+ {
+ name: "import.26",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ "fmt"
+ "io" /* comment */
+)`,
+ out: `package main
+
+import (
+ "io" /* comment */
+)
+`,
+ },
+ {
+ name: "import.27",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ "fmt" /* comment */
+ "io"
+)`,
+ out: `package main
+
+import (
+ "io"
+)
+`,
+ },
+ {
+ name: "import.28",
+ pkg: "fmt",
+ in: `package main
+
+import (
+ /* comment */ "fmt"
+ "io"
+)`,
+ out: `package main
+
+import (
+ "io"
+)
+`,
+ },
+ {
+ name: "import.29",
+ pkg: "fmt",
+ in: `package main
+
+// comment 1
+import (
+ "fmt"
+ "io" // comment 2
+)`,
+ out: `package main
+
+// comment 1
+import (
+ "io" // comment 2
+)
+`,
+ },
+ {
+ name: "import.30",
+ pkg: "fmt",
+ in: `package main
+
+// comment 1
+import (
+ "fmt" // comment 2
+ "io"
+)`,
+ out: `package main
+
+// comment 1
+import (
+ "io"
+)
+`,
+ },
+ {
+ name: "import.31",
+ pkg: "fmt",
+ in: `package main
+
+// comment 1
+import (
+ "fmt"
+ /* comment 2 */ "io"
+)`,
+ out: `package main
+
+// comment 1
+import (
+ /* comment 2 */ "io"
+)
+`,
+ },
+ {
+ name: "import.32",
+ pkg: "fmt",
+ renamedPkg: "f",
+ in: `package main
+
+// comment 1
+import (
+ f "fmt"
+ /* comment 2 */ i "io"
+)`,
+ out: `package main
+
+// comment 1
+import (
+ /* comment 2 */ i "io"
+)
+`,
+ },
+ {
+ name: "import.33",
+ pkg: "fmt",
+ renamedPkg: "f",
+ in: `package main
+
+// comment 1
+import (
+ /* comment 2 */ f "fmt"
+ i "io"
+)`,
+ out: `package main
+
+// comment 1
+import (
+ i "io"
+)
+`,
+ },
+ {
+ name: "import.34",
+ pkg: "fmt",
+ renamedPkg: "f",
+ in: `package main
+
+// comment 1
+import (
+ f "fmt" /* comment 2 */
+ i "io"
+)`,
+ out: `package main
+
+// comment 1
+import (
+ i "io"
+)
+`,
+ },
+ {
+ name: "import.35",
+ pkg: "fmt",
+ in: `package main
+
+// comment 1
+import (
+ "fmt"
+ // comment 2
+ "io"
+)`,
+ out: `package main
+
+// comment 1
+import (
+ // comment 2
+ "io"
+)
+`,
+ },
+ {
+ name: "import.36",
+ pkg: "fmt",
+ in: `package main
+
+/* comment 1 */
+import (
+ "fmt"
+ /* comment 2 */
+ "io"
+)`,
+ out: `package main
+
+/* comment 1 */
+import (
+ /* comment 2 */
+ "io"
+)
+`,
+ },
+
+ // Issue 20229: MergeLine panic on weird input
+ {
+ name: "import.37",
+ pkg: "io",
+ in: `package main
+import("_"
+"io")`,
+ out: `package main
+
+import (
+ "_"
+)
+`,
+ },
+}
+
+func TestDeleteImport(t *testing.T) {
+ for _, test := range deleteTests {
+ file := parse(t, test.name, test.in)
+ DeleteNamedImport(fset, file, test.renamedPkg, test.pkg)
+ if got := print(t, test.name, file); got != test.out {
+ t.Errorf("%s:\ngot: %s\nwant: %s", test.name, got, test.out)
+ }
+ }
+}
+
+type rewriteTest struct {
+ name string
+ srcPkg string
+ dstPkg string
+ in string
+ out string
+}
+
+var rewriteTests = []rewriteTest{
+ {
+ name: "import.13",
+ srcPkg: "utf8",
+ dstPkg: "encoding/utf8",
+ in: `package main
+
+import (
+ "io"
+ "os"
+ "utf8" // thanks ken
+)
+`,
+ out: `package main
+
+import (
+ "encoding/utf8" // thanks ken
+ "io"
+ "os"
+)
+`,
+ },
+ {
+ name: "import.14",
+ srcPkg: "asn1",
+ dstPkg: "encoding/asn1",
+ in: `package main
+
+import (
+ "asn1"
+ "crypto"
+ "crypto/rsa"
+ _ "crypto/sha1"
+ "crypto/x509"
+ "crypto/x509/pkix"
+ "time"
+)
+
+var x = 1
+`,
+ out: `package main
+
+import (
+ "crypto"
+ "crypto/rsa"
+ _ "crypto/sha1"
+ "crypto/x509"
+ "crypto/x509/pkix"
+ "encoding/asn1"
+ "time"
+)
+
+var x = 1
+`,
+ },
+ {
+ name: "import.15",
+ srcPkg: "url",
+ dstPkg: "net/url",
+ in: `package main
+
+import (
+ "bufio"
+ "net"
+ "path"
+ "url"
+)
+
+var x = 1 // comment on x, not on url
+`,
+ out: `package main
+
+import (
+ "bufio"
+ "net"
+ "net/url"
+ "path"
+)
+
+var x = 1 // comment on x, not on url
+`,
+ },
+ {
+ name: "import.16",
+ srcPkg: "http",
+ dstPkg: "net/http",
+ in: `package main
+
+import (
+ "flag"
+ "http"
+ "log"
+ "text/template"
+)
+
+var addr = flag.String("addr", ":1718", "http service address") // Q=17, R=18
+`,
+ out: `package main
+
+import (
+ "flag"
+ "log"
+ "net/http"
+ "text/template"
+)
+
+var addr = flag.String("addr", ":1718", "http service address") // Q=17, R=18
+`,
+ },
+}
+
+func TestRewriteImport(t *testing.T) {
+ for _, test := range rewriteTests {
+ file := parse(t, test.name, test.in)
+ RewriteImport(fset, file, test.srcPkg, test.dstPkg)
+ if got := print(t, test.name, file); got != test.out {
+ t.Errorf("%s:\ngot: %s\nwant: %s", test.name, got, test.out)
+ }
+ }
+}
+
+var importsTests = []struct {
+ name string
+ in string
+ want [][]string
+}{
+ {
+ name: "no packages",
+ in: `package foo
+`,
+ want: nil,
+ },
+ {
+ name: "one group",
+ in: `package foo
+
+import (
+ "fmt"
+ "testing"
+)
+`,
+ want: [][]string{{"fmt", "testing"}},
+ },
+ {
+ name: "four groups",
+ in: `package foo
+
+import "C"
+import (
+ "fmt"
+ "testing"
+
+ "appengine"
+
+ "myproject/mylib1"
+ "myproject/mylib2"
+)
+`,
+ want: [][]string{
+ {"C"},
+ {"fmt", "testing"},
+ {"appengine"},
+ {"myproject/mylib1", "myproject/mylib2"},
+ },
+ },
+ {
+ name: "multiple factored groups",
+ in: `package foo
+
+import (
+ "fmt"
+ "testing"
+
+ "appengine"
+)
+import (
+ "reflect"
+
+ "bytes"
+)
+`,
+ want: [][]string{
+ {"fmt", "testing"},
+ {"appengine"},
+ {"reflect"},
+ {"bytes"},
+ },
+ },
+}
+
+func unquote(s string) string {
+ res, err := strconv.Unquote(s)
+ if err != nil {
+ return "could_not_unquote"
+ }
+ return res
+}
+
+func TestImports(t *testing.T) {
+ fset := token.NewFileSet()
+ for _, test := range importsTests {
+ f, err := parser.ParseFile(fset, "test.go", test.in, 0)
+ if err != nil {
+ t.Errorf("%s: %v", test.name, err)
+ continue
+ }
+ var got [][]string
+ for _, group := range Imports(fset, f) {
+ var b []string
+ for _, spec := range group {
+ b = append(b, unquote(spec.Path.Value))
+ }
+ got = append(got, b)
+ }
+ if !reflect.DeepEqual(got, test.want) {
+ t.Errorf("Imports(%s)=%v, want %v", test.name, got, test.want)
+ }
+ }
+}
+
+var usesImportTests = []struct {
+ name string
+ path string
+ in string
+ want bool
+}{
+ {
+ name: "no packages",
+ path: "io",
+ in: `package foo
+`,
+ want: false,
+ },
+ {
+ name: "import.1",
+ path: "io",
+ in: `package foo
+
+import "io"
+
+var _ io.Writer
+`,
+ want: true,
+ },
+ {
+ name: "import.2",
+ path: "io",
+ in: `package foo
+
+import "io"
+`,
+ want: false,
+ },
+ {
+ name: "import.3",
+ path: "io",
+ in: `package foo
+
+import "io"
+
+var io = 42
+`,
+ want: false,
+ },
+ {
+ name: "import.4",
+ path: "io",
+ in: `package foo
+
+import i "io"
+
+var _ i.Writer
+`,
+ want: true,
+ },
+ {
+ name: "import.5",
+ path: "io",
+ in: `package foo
+
+import i "io"
+`,
+ want: false,
+ },
+ {
+ name: "import.6",
+ path: "io",
+ in: `package foo
+
+import i "io"
+
+var i = 42
+var io = 42
+`,
+ want: false,
+ },
+ {
+ name: "import.7",
+ path: "encoding/json",
+ in: `package foo
+
+import "encoding/json"
+
+var _ json.Encoder
+`,
+ want: true,
+ },
+ {
+ name: "import.8",
+ path: "encoding/json",
+ in: `package foo
+
+import "encoding/json"
+`,
+ want: false,
+ },
+ {
+ name: "import.9",
+ path: "encoding/json",
+ in: `package foo
+
+import "encoding/json"
+
+var json = 42
+`,
+ want: false,
+ },
+ {
+ name: "import.10",
+ path: "encoding/json",
+ in: `package foo
+
+import j "encoding/json"
+
+var _ j.Encoder
+`,
+ want: true,
+ },
+ {
+ name: "import.11",
+ path: "encoding/json",
+ in: `package foo
+
+import j "encoding/json"
+`,
+ want: false,
+ },
+ {
+ name: "import.12",
+ path: "encoding/json",
+ in: `package foo
+
+import j "encoding/json"
+
+var j = 42
+var json = 42
+`,
+ want: false,
+ },
+ {
+ name: "import.13",
+ path: "io",
+ in: `package foo
+
+import _ "io"
+`,
+ want: true,
+ },
+ {
+ name: "import.14",
+ path: "io",
+ in: `package foo
+
+import . "io"
+`,
+ want: true,
+ },
+}
+
+func TestUsesImport(t *testing.T) {
+ fset := token.NewFileSet()
+ for _, test := range usesImportTests {
+ f, err := parser.ParseFile(fset, "test.go", test.in, 0)
+ if err != nil {
+ t.Errorf("%s: %v", test.name, err)
+ continue
+ }
+ got := UsesImport(f, test.path)
+ if got != test.want {
+ t.Errorf("UsesImport(%s)=%v, want %v", test.name, got, test.want)
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
new file mode 100644
index 0000000..cf72ea9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
@@ -0,0 +1,477 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "fmt"
+ "go/ast"
+ "reflect"
+ "sort"
+)
+
+// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
+// before and/or after the node's children, using a Cursor describing
+// the current node and providing operations on it.
+//
+// The return value of ApplyFunc controls the syntax tree traversal.
+// See Apply for details.
+type ApplyFunc func(*Cursor) bool
+
+// Apply traverses a syntax tree recursively, starting with root,
+// and calling pre and post for each node as described below.
+// Apply returns the syntax tree, possibly modified.
+//
+// If pre is not nil, it is called for each node before the node's
+// children are traversed (pre-order). If pre returns false, no
+// children are traversed, and post is not called for that node.
+//
+// If post is not nil, and a prior call of pre didn't return false,
+// post is called for each node after its children are traversed
+// (post-order). If post returns false, traversal is terminated and
+// Apply returns immediately.
+//
+// Only fields that refer to AST nodes are considered children;
+// i.e., token.Pos, Scopes, Objects, and fields of basic types
+// (strings, etc.) are ignored.
+//
+// Children are traversed in the order in which they appear in the
+// respective node's struct definition. A package's files are
+// traversed in the filenames' alphabetical order.
+//
+func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
+ parent := &struct{ ast.Node }{root}
+ defer func() {
+ if r := recover(); r != nil && r != abort {
+ panic(r)
+ }
+ result = parent.Node
+ }()
+ a := &application{pre: pre, post: post}
+ a.apply(parent, "Node", nil, root)
+ return
+}
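+
+// A minimal usage sketch: given an *ast.File f (e.g. from go/parser), the
+// post function below replaces every identifier named "foo" (a hypothetical
+// name) with "bar":
+//
+//	Apply(f, nil, func(c *Cursor) bool {
+//		if id, ok := c.Node().(*ast.Ident); ok && id.Name == "foo" {
+//			c.Replace(ast.NewIdent("bar"))
+//		}
+//		return true
+//	})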
+
+var abort = new(int) // singleton, to signal termination of Apply
+
+// A Cursor describes a node encountered during Apply.
+// Information about the node and its parent is available
+// from the Node, Parent, Name, and Index methods.
+//
+// If p is a variable whose type and value are those of the current parent
+// node (c.Parent()), and f is the field identifier whose name is c.Name(),
+// then the following invariants hold:
+//
+// p.f == c.Node() if c.Index() < 0
+// p.f[c.Index()] == c.Node() if c.Index() >= 0
+//
+// The methods Replace, Delete, InsertBefore, and InsertAfter
+// can be used to change the AST without disrupting Apply.
+type Cursor struct {
+ parent ast.Node
+ name string
+ iter *iterator // valid if non-nil
+ node ast.Node
+}
+
+// Node returns the current Node.
+func (c *Cursor) Node() ast.Node { return c.node }
+
+// Parent returns the parent of the current Node.
+func (c *Cursor) Parent() ast.Node { return c.parent }
+
+// Name returns the name of the parent Node field that contains the current Node.
+// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
+// the filename for the current Node.
+func (c *Cursor) Name() string { return c.name }
+
+// Index reports the index >= 0 of the current Node in the slice of Nodes that
+// contains it, or a value < 0 if the current Node is not part of a slice.
+// The index of the current node changes if InsertBefore is called while
+// processing the current node.
+func (c *Cursor) Index() int {
+ if c.iter != nil {
+ return c.iter.index
+ }
+ return -1
+}
+
+// field returns the current node's parent field value.
+func (c *Cursor) field() reflect.Value {
+ return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
+}
+
+// Replace replaces the current Node with n.
+// The replacement node is not walked by Apply.
+func (c *Cursor) Replace(n ast.Node) {
+ if _, ok := c.node.(*ast.File); ok {
+ file, ok := n.(*ast.File)
+ if !ok {
+ panic("attempt to replace *ast.File with non-*ast.File")
+ }
+ c.parent.(*ast.Package).Files[c.name] = file
+ return
+ }
+
+ v := c.field()
+ if i := c.Index(); i >= 0 {
+ v = v.Index(i)
+ }
+ v.Set(reflect.ValueOf(n))
+}
+
+// Delete deletes the current Node from its containing slice.
+// If the current Node is not part of a slice, Delete panics.
+// As a special case, if the current node is a package file,
+// Delete removes it from the package's Files map.
+func (c *Cursor) Delete() {
+ if _, ok := c.node.(*ast.File); ok {
+ delete(c.parent.(*ast.Package).Files, c.name)
+ return
+ }
+
+ i := c.Index()
+ if i < 0 {
+ panic("Delete node not contained in slice")
+ }
+ v := c.field()
+ l := v.Len()
+ reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
+ v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
+ v.SetLen(l - 1)
+ c.iter.step--
+}
+
+// InsertAfter inserts n after the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertAfter panics.
+// Apply does not walk n.
+func (c *Cursor) InsertAfter(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertAfter node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
+ v.Index(i + 1).Set(reflect.ValueOf(n))
+ c.iter.step++
+}
+
+// InsertBefore inserts n before the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertBefore panics.
+// Apply will not walk n.
+func (c *Cursor) InsertBefore(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertBefore node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
+ v.Index(i).Set(reflect.ValueOf(n))
+ c.iter.index++
+}
+
+// application carries all the shared data so we can pass it around cheaply.
+type application struct {
+ pre, post ApplyFunc
+ cursor Cursor
+ iter iterator
+}
+
+func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
+ // convert typed nil into untyped nil
+ if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
+ n = nil
+ }
+
+ // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
+ saved := a.cursor
+ a.cursor.parent = parent
+ a.cursor.name = name
+ a.cursor.iter = iter
+ a.cursor.node = n
+
+ if a.pre != nil && !a.pre(&a.cursor) {
+ a.cursor = saved
+ return
+ }
+
+ // walk children
+ // (the order of the cases matches the order of the corresponding node types in go/ast)
+ switch n := n.(type) {
+ case nil:
+ // nothing to do
+
+ // Comments and fields
+ case *ast.Comment:
+ // nothing to do
+
+ case *ast.CommentGroup:
+ if n != nil {
+ a.applyList(n, "List")
+ }
+
+ case *ast.Field:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.FieldList:
+ a.applyList(n, "List")
+
+ // Expressions
+ case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
+ // nothing to do
+
+ case *ast.Ellipsis:
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.FuncLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CompositeLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Elts")
+
+ case *ast.ParenExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SelectorExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Sel", nil, n.Sel)
+
+ case *ast.IndexExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Index", nil, n.Index)
+
+ case *ast.SliceExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Low", nil, n.Low)
+ a.apply(n, "High", nil, n.High)
+ a.apply(n, "Max", nil, n.Max)
+
+ case *ast.TypeAssertExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Type", nil, n.Type)
+
+ case *ast.CallExpr:
+ a.apply(n, "Fun", nil, n.Fun)
+ a.applyList(n, "Args")
+
+ case *ast.StarExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.UnaryExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.BinaryExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Y", nil, n.Y)
+
+ case *ast.KeyValueExpr:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ // Types
+ case *ast.ArrayType:
+ a.apply(n, "Len", nil, n.Len)
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.StructType:
+ a.apply(n, "Fields", nil, n.Fields)
+
+ case *ast.FuncType:
+ a.apply(n, "Params", nil, n.Params)
+ a.apply(n, "Results", nil, n.Results)
+
+ case *ast.InterfaceType:
+ a.apply(n, "Methods", nil, n.Methods)
+
+ case *ast.MapType:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.ChanType:
+ a.apply(n, "Value", nil, n.Value)
+
+ // Statements
+ case *ast.BadStmt:
+ // nothing to do
+
+ case *ast.DeclStmt:
+ a.apply(n, "Decl", nil, n.Decl)
+
+ case *ast.EmptyStmt:
+ // nothing to do
+
+ case *ast.LabeledStmt:
+ a.apply(n, "Label", nil, n.Label)
+ a.apply(n, "Stmt", nil, n.Stmt)
+
+ case *ast.ExprStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SendStmt:
+ a.apply(n, "Chan", nil, n.Chan)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.IncDecStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.AssignStmt:
+ a.applyList(n, "Lhs")
+ a.applyList(n, "Rhs")
+
+ case *ast.GoStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.DeferStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.ReturnStmt:
+ a.applyList(n, "Results")
+
+ case *ast.BranchStmt:
+ a.apply(n, "Label", nil, n.Label)
+
+ case *ast.BlockStmt:
+ a.applyList(n, "List")
+
+ case *ast.IfStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Body", nil, n.Body)
+ a.apply(n, "Else", nil, n.Else)
+
+ case *ast.CaseClause:
+ a.applyList(n, "List")
+ a.applyList(n, "Body")
+
+ case *ast.SwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.TypeSwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Assign", nil, n.Assign)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CommClause:
+ a.apply(n, "Comm", nil, n.Comm)
+ a.applyList(n, "Body")
+
+ case *ast.SelectStmt:
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.ForStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Post", nil, n.Post)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.RangeStmt:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Declarations
+ case *ast.ImportSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Path", nil, n.Path)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.ValueSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Values")
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.TypeSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.BadDecl:
+ // nothing to do
+
+ case *ast.GenDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Specs")
+
+ case *ast.FuncDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Recv", nil, n.Recv)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Files and packages
+ case *ast.File:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.applyList(n, "Decls")
+ // Don't walk n.Comments; they have either been walked already if
+ // they are Doc comments, or they can be easily walked explicitly.
+
+ case *ast.Package:
+ // collect and sort names for reproducible behavior
+ var names []string
+ for name := range n.Files {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ a.apply(n, name, nil, n.Files[name])
+ }
+
+ default:
+ panic(fmt.Sprintf("Apply: unexpected node type %T", n))
+ }
+
+ if a.post != nil && !a.post(&a.cursor) {
+ panic(abort)
+ }
+
+ a.cursor = saved
+}
+
+// An iterator controls iteration over a slice of nodes.
+type iterator struct {
+ index, step int
+}
+
+func (a *application) applyList(parent ast.Node, name string) {
+ // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
+ saved := a.iter
+ a.iter.index = 0
+ for {
+ // must reload parent.name each time, since cursor modifications might change it
+ v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
+ if a.iter.index >= v.Len() {
+ break
+ }
+
+ // element x may be nil in a bad AST - be cautious
+ var x ast.Node
+ if e := v.Index(a.iter.index); e.IsValid() {
+ x = e.Interface().(ast.Node)
+ }
+
+ a.iter.step = 1
+ a.apply(parent, name, &a.iter, x)
+ a.iter.index += a.iter.step
+ }
+ a.iter = saved
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite_test.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite_test.go
new file mode 100644
index 0000000..1c86970
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite_test.go
@@ -0,0 +1,248 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil_test
+
+import (
+ "bytes"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "testing"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+var rewriteTests = [...]struct {
+ name string
+ orig, want string
+ pre, post astutil.ApplyFunc
+}{
+ {name: "nop", orig: "package p\n", want: "package p\n"},
+
+ {name: "replace",
+ orig: `package p
+
+var x int
+`,
+ want: `package p
+
+var t T
+`,
+ post: func(c *astutil.Cursor) bool {
+ if _, ok := c.Node().(*ast.ValueSpec); ok {
+ c.Replace(valspec("t", "T"))
+ return false
+ }
+ return true
+ },
+ },
+
+ {name: "set doc strings",
+ orig: `package p
+
+const z = 0
+
+type T struct{}
+
+var x int
+`,
+ want: `package p
+// a foo is a foo
+const z = 0
+// a foo is a foo
+type T struct{}
+// a foo is a foo
+var x int
+`,
+ post: func(c *astutil.Cursor) bool {
+ if _, ok := c.Parent().(*ast.GenDecl); ok && c.Name() == "Doc" && c.Node() == nil {
+ c.Replace(&ast.CommentGroup{List: []*ast.Comment{{Text: "// a foo is a foo"}}})
+ }
+ return true
+ },
+ },
+
+ {name: "insert names",
+ orig: `package p
+
+const a = 1
+`,
+ want: `package p
+
+const a, b, c = 1, 2, 3
+`,
+ pre: func(c *astutil.Cursor) bool {
+ if _, ok := c.Parent().(*ast.ValueSpec); ok {
+ switch c.Name() {
+ case "Names":
+ c.InsertAfter(ast.NewIdent("c"))
+ c.InsertAfter(ast.NewIdent("b"))
+ case "Values":
+ c.InsertAfter(&ast.BasicLit{Kind: token.INT, Value: "3"})
+ c.InsertAfter(&ast.BasicLit{Kind: token.INT, Value: "2"})
+ }
+ }
+ return true
+ },
+ },
+
+ {name: "insert",
+ orig: `package p
+
+var (
+ x int
+ y int
+)
+`,
+ want: `package p
+
+var before1 int
+var before2 int
+
+var (
+ x int
+ y int
+)
+var after2 int
+var after1 int
+`,
+ pre: func(c *astutil.Cursor) bool {
+ if _, ok := c.Node().(*ast.GenDecl); ok {
+ c.InsertBefore(vardecl("before1", "int"))
+ c.InsertAfter(vardecl("after1", "int"))
+ c.InsertAfter(vardecl("after2", "int"))
+ c.InsertBefore(vardecl("before2", "int"))
+ }
+ return true
+ },
+ },
+
+ {name: "delete",
+ orig: `package p
+
+var x int
+var y int
+var z int
+`,
+ want: `package p
+
+var y int
+var z int
+`,
+ pre: func(c *astutil.Cursor) bool {
+ n := c.Node()
+ if d, ok := n.(*ast.GenDecl); ok && d.Specs[0].(*ast.ValueSpec).Names[0].Name == "x" {
+ c.Delete()
+ }
+ return true
+ },
+ },
+
+ {name: "insertafter-delete",
+ orig: `package p
+
+var x int
+var y int
+var z int
+`,
+ want: `package p
+
+var x1 int
+
+var y int
+var z int
+`,
+ pre: func(c *astutil.Cursor) bool {
+ n := c.Node()
+ if d, ok := n.(*ast.GenDecl); ok && d.Specs[0].(*ast.ValueSpec).Names[0].Name == "x" {
+ c.InsertAfter(vardecl("x1", "int"))
+ c.Delete()
+ }
+ return true
+ },
+ },
+
+ {name: "delete-insertafter",
+ orig: `package p
+
+var x int
+var y int
+var z int
+`,
+ want: `package p
+
+var y int
+var x1 int
+var z int
+`,
+ pre: func(c *astutil.Cursor) bool {
+ n := c.Node()
+ if d, ok := n.(*ast.GenDecl); ok && d.Specs[0].(*ast.ValueSpec).Names[0].Name == "x" {
+ c.Delete()
+ // The cursor is now effectively atop the 'var y int' node.
+ c.InsertAfter(vardecl("x1", "int"))
+ }
+ return true
+ },
+ },
+}
+
+func valspec(name, typ string) *ast.ValueSpec {
+ return &ast.ValueSpec{Names: []*ast.Ident{ast.NewIdent(name)},
+ Type: ast.NewIdent(typ),
+ }
+}
+
+func vardecl(name, typ string) *ast.GenDecl {
+ return &ast.GenDecl{
+ Tok: token.VAR,
+ Specs: []ast.Spec{valspec(name, typ)},
+ }
+}
+
+func TestRewrite(t *testing.T) {
+ t.Run("*", func(t *testing.T) {
+ for _, test := range rewriteTests {
+ test := test
+ t.Run(test.name, func(t *testing.T) {
+ t.Parallel()
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, test.name, test.orig, parser.ParseComments)
+ if err != nil {
+ t.Fatal(err)
+ }
+ n := astutil.Apply(f, test.pre, test.post)
+ var buf bytes.Buffer
+ if err := format.Node(&buf, fset, n); err != nil {
+ t.Fatal(err)
+ }
+ got := buf.String()
+ if got != test.want {
+ t.Errorf("got:\n\n%s\nwant:\n\n%s\n", got, test.want)
+ }
+ })
+ }
+ })
+}
+
+var sink ast.Node
+
+func BenchmarkRewrite(b *testing.B) {
+ for _, test := range rewriteTests {
+ b.Run(test.name, func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ b.StopTimer()
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, test.name, test.orig, parser.ParseComments)
+ if err != nil {
+ b.Fatal(err)
+ }
+ b.StartTimer()
+ sink = astutil.Apply(f, test.pre, test.post)
+ }
+ })
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go
new file mode 100644
index 0000000..7630629
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go
@@ -0,0 +1,14 @@
+package astutil
+
+import "go/ast"
+
+// Unparen returns e with any enclosing parentheses stripped.
+func Unparen(e ast.Expr) ast.Expr {
+ for {
+ p, ok := e.(*ast.ParenExpr)
+ if !ok {
+ return e
+ }
+ e = p.X
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go
new file mode 100644
index 0000000..c0cb03e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/allpackages.go
@@ -0,0 +1,198 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package buildutil provides utilities related to the go/build
+// package in the standard library.
+//
+// All I/O is done via the build.Context file system interface, which must
+// be concurrency-safe.
+package buildutil // import "golang.org/x/tools/go/buildutil"
+
+import (
+ "go/build"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "sync"
+)
+
+// AllPackages returns the package path of each Go package in any source
+// directory of the specified build context (e.g. $GOROOT or an element
+// of $GOPATH). Errors are ignored. The results are sorted.
+// All package paths are canonical, and thus may contain "/vendor/".
+//
+// The result may include import paths for directories that contain no
+// *.go files, such as "archive" (in $GOROOT/src).
+//
+// All I/O is done via the build.Context file system interface,
+// which must be concurrency-safe.
+//
+func AllPackages(ctxt *build.Context) []string {
+ var list []string
+ ForEachPackage(ctxt, func(pkg string, _ error) {
+ list = append(list, pkg)
+ })
+ sort.Strings(list)
+ return list
+}
+
+// ForEachPackage calls the found function with the package path of
+// each Go package it finds in any source directory of the specified
+// build context (e.g. $GOROOT or an element of $GOPATH).
+// All package paths are canonical, and thus may contain "/vendor/".
+//
+// If the package directory exists but could not be read, the second
+// argument to the found function provides the error.
+//
+// All I/O is done via the build.Context file system interface,
+// which must be concurrency-safe.
+//
+func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) {
+ ch := make(chan item)
+
+ var wg sync.WaitGroup
+ for _, root := range ctxt.SrcDirs() {
+ root := root
+ wg.Add(1)
+ go func() {
+ allPackages(ctxt, root, ch)
+ wg.Done()
+ }()
+ }
+ go func() {
+ wg.Wait()
+ close(ch)
+ }()
+
+ // All calls to found occur in the caller's goroutine.
+ for i := range ch {
+ found(i.importPath, i.err)
+ }
+}
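+
+// A minimal usage sketch (the log call assumes the standard "log" package):
+//
+//	ForEachPackage(&build.Default, func(path string, err error) {
+//		if err != nil {
+//			log.Printf("skipping %s: %v", path, err)
+//			return
+//		}
+//		// ... use path ...
+//	})
+//
+// Because every call to the callback happens in the caller's goroutine, the
+// closure may update local state without additional synchronization.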
+
+type item struct {
+ importPath string
+ err error // (optional)
+}
+
+// We use a process-wide counting semaphore to limit
+// the number of parallel calls to ReadDir.
+var ioLimit = make(chan bool, 20)
+
+func allPackages(ctxt *build.Context, root string, ch chan<- item) {
+ root = filepath.Clean(root) + string(os.PathSeparator)
+
+ var wg sync.WaitGroup
+
+ var walkDir func(dir string)
+ walkDir = func(dir string) {
+ // Avoid .foo, _foo, and testdata directory trees.
+ base := filepath.Base(dir)
+ if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" {
+ return
+ }
+
+ pkg := filepath.ToSlash(strings.TrimPrefix(dir, root))
+
+ // Prune search if we encounter any of these import paths.
+ switch pkg {
+ case "builtin":
+ return
+ }
+
+ ioLimit <- true
+ files, err := ReadDir(ctxt, dir)
+ <-ioLimit
+ if pkg != "" || err != nil {
+ ch <- item{pkg, err}
+ }
+ for _, fi := range files {
+ fi := fi
+ if fi.IsDir() {
+ wg.Add(1)
+ go func() {
+ walkDir(filepath.Join(dir, fi.Name()))
+ wg.Done()
+ }()
+ }
+ }
+ }
+
+ walkDir(root)
+ wg.Wait()
+}
+
+// ExpandPatterns returns the set of packages matched by patterns,
+// which may have the following forms:
+//
+// golang.org/x/tools/cmd/guru # a single package
+// golang.org/x/tools/... # all packages beneath dir
+// ... # the entire workspace.
+//
+// Order is significant: a pattern preceded by '-' removes matching
+// packages from the set. For example, these patterns match all encoding
+// packages except encoding/xml:
+//
+// encoding/... -encoding/xml
+//
+// A trailing slash in a pattern is ignored. (Path components of Go
+// package names are separated by slash, not the platform's path separator.)
+//
+func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool {
+ // TODO(adonovan): support other features of 'go list':
+ // - "std"/"cmd"/"all" meta-packages
+ // - "..." not at the end of a pattern
+ // - relative patterns using "./" or "../" prefix
+
+ pkgs := make(map[string]bool)
+ doPkg := func(pkg string, neg bool) {
+ if neg {
+ delete(pkgs, pkg)
+ } else {
+ pkgs[pkg] = true
+ }
+ }
+
+ // Scan entire workspace if wildcards are present.
+ // TODO(adonovan): opt: scan only the necessary subtrees of the workspace.
+ var all []string
+ for _, arg := range patterns {
+ if strings.HasSuffix(arg, "...") {
+ all = AllPackages(ctxt)
+ break
+ }
+ }
+
+ for _, arg := range patterns {
+ if arg == "" {
+ continue
+ }
+
+ neg := arg[0] == '-'
+ if neg {
+ arg = arg[1:]
+ }
+
+ if arg == "..." {
+ // ... matches all packages
+ for _, pkg := range all {
+ doPkg(pkg, neg)
+ }
+ } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg {
+ // dir/... matches all packages beneath dir
+ for _, pkg := range all {
+ if strings.HasPrefix(pkg, dir) &&
+ (len(pkg) == len(dir) || pkg[len(dir)] == '/') {
+ doPkg(pkg, neg)
+ }
+ }
+ } else {
+ // single package
+ doPkg(strings.TrimSuffix(arg, "/"), neg)
+ }
+ }
+
+ return pkgs
+}
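+
+// A small worked example of the pattern semantics described above, using
+// hypothetical packages:
+//
+//	pkgs := ExpandPatterns(ctxt, []string{"encoding/...", "-encoding/xml"})
+//
+// pkgs then contains "encoding", "encoding/json", and so on, but not
+// "encoding/xml": the wildcard adds the whole subtree and the subsequent
+// '-' pattern removes the one package.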
diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages_test.go b/vendor/golang.org/x/tools/go/buildutil/allpackages_test.go
new file mode 100644
index 0000000..0440ff2
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/allpackages_test.go
@@ -0,0 +1,78 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Incomplete source tree on Android.
+
+// +build !android
+
+package buildutil_test
+
+import (
+ "go/build"
+ "sort"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/buildutil"
+)
+
+func TestAllPackages(t *testing.T) {
+ all := buildutil.AllPackages(&build.Default)
+
+ set := make(map[string]bool)
+ for _, pkg := range all {
+ set[pkg] = true
+ }
+
+ const wantAtLeast = 250
+ if len(all) < wantAtLeast {
+ t.Errorf("Found only %d packages, want at least %d", len(all), wantAtLeast)
+ }
+
+ for _, want := range []string{"fmt", "crypto/sha256", "golang.org/x/tools/go/buildutil"} {
+ if !set[want] {
+ t.Errorf("Package %q not found; got %s", want, all)
+ }
+ }
+}
+
+func TestExpandPatterns(t *testing.T) {
+ tree := make(map[string]map[string]string)
+ for _, pkg := range []string{
+ "encoding",
+ "encoding/xml",
+ "encoding/hex",
+ "encoding/json",
+ "fmt",
+ } {
+ tree[pkg] = make(map[string]string)
+ }
+ ctxt := buildutil.FakeContext(tree)
+
+ for _, test := range []struct {
+ patterns string
+ want string
+ }{
+ {"", ""},
+ {"fmt", "fmt"},
+ {"nosuchpkg", "nosuchpkg"},
+ {"nosuchdir/...", ""},
+ {"...", "encoding encoding/hex encoding/json encoding/xml fmt"},
+ {"encoding/... -encoding/xml", "encoding encoding/hex encoding/json"},
+ {"... -encoding/...", "fmt"},
+ {"encoding", "encoding"},
+ {"encoding/", "encoding"},
+ } {
+ var pkgs []string
+ for pkg := range buildutil.ExpandPatterns(ctxt, strings.Fields(test.patterns)) {
+ pkgs = append(pkgs, pkg)
+ }
+ sort.Strings(pkgs)
+ got := strings.Join(pkgs, " ")
+ if got != test.want {
+ t.Errorf("ExpandPatterns(%s) = %s, want %s",
+ test.patterns, got, test.want)
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
new file mode 100644
index 0000000..24cbcbe
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
@@ -0,0 +1,108 @@
+package buildutil
+
+import (
+ "fmt"
+ "go/build"
+ "io"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "sort"
+ "strings"
+ "time"
+)
+
+// FakeContext returns a build.Context for the fake file tree specified
+// by pkgs, which maps package import paths to a mapping from file base
+// names to contents.
+//
+// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides
+// the necessary file access methods to read from memory instead of the
+// real file system.
+//
+// Unlike a real file tree, the fake one has only two levels---packages
+// and files---so ReadDir("/go/src/") returns all packages under
+// /go/src/ including, for instance, "math" and "math/big".
+// ReadDir("/go/src/math/big") would return all the files in the
+// "math/big" package.
+//
+func FakeContext(pkgs map[string]map[string]string) *build.Context {
+ clean := func(filename string) string {
+ f := path.Clean(filepath.ToSlash(filename))
+ // Removing "/go/src" while respecting segment
+ // boundaries has this unfortunate corner case:
+ if f == "/go/src" {
+ return ""
+ }
+ return strings.TrimPrefix(f, "/go/src/")
+ }
+
+ ctxt := build.Default // copy
+ ctxt.GOROOT = "/go"
+ ctxt.GOPATH = ""
+ ctxt.IsDir = func(dir string) bool {
+ dir = clean(dir)
+ if dir == "" {
+ return true // needed by (*build.Context).SrcDirs
+ }
+ return pkgs[dir] != nil
+ }
+ ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
+ dir = clean(dir)
+ var fis []os.FileInfo
+ if dir == "" {
+ // enumerate packages
+ for importPath := range pkgs {
+ fis = append(fis, fakeDirInfo(importPath))
+ }
+ } else {
+ // enumerate files of package
+ for basename := range pkgs[dir] {
+ fis = append(fis, fakeFileInfo(basename))
+ }
+ }
+ sort.Sort(byName(fis))
+ return fis, nil
+ }
+ ctxt.OpenFile = func(filename string) (io.ReadCloser, error) {
+ filename = clean(filename)
+ dir, base := path.Split(filename)
+ content, ok := pkgs[path.Clean(dir)][base]
+ if !ok {
+ return nil, fmt.Errorf("file not found: %s", filename)
+ }
+ return ioutil.NopCloser(strings.NewReader(content)), nil
+ }
+ ctxt.IsAbsPath = func(path string) bool {
+ path = filepath.ToSlash(path)
+ // Don't rely on the default (filepath.IsAbs) since on
+ // Windows, it reports virtual paths as non-absolute.
+ return strings.HasPrefix(path, "/")
+ }
+ return &ctxt
+}
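+
+// A minimal sketch of constructing a fake tree (the import path and file
+// name are hypothetical):
+//
+//	ctxt := FakeContext(map[string]map[string]string{
+//		"example/hello": {"hello.go": "package hello\n"},
+//	})
+//
+// With this context, IsDir(ctxt, "/go/src/example/hello") reports true and
+// OpenFile(ctxt, "/go/src/example/hello/hello.go") returns the source above.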
+
+type byName []os.FileInfo
+
+func (s byName) Len() int { return len(s) }
+func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }
+
+type fakeFileInfo string
+
+func (fi fakeFileInfo) Name() string { return string(fi) }
+func (fakeFileInfo) Sys() interface{} { return nil }
+func (fakeFileInfo) ModTime() time.Time { return time.Time{} }
+func (fakeFileInfo) IsDir() bool { return false }
+func (fakeFileInfo) Size() int64 { return 0 }
+func (fakeFileInfo) Mode() os.FileMode { return 0644 }
+
+type fakeDirInfo string
+
+func (fd fakeDirInfo) Name() string { return string(fd) }
+func (fakeDirInfo) Sys() interface{} { return nil }
+func (fakeDirInfo) ModTime() time.Time { return time.Time{} }
+func (fakeDirInfo) IsDir() bool { return true }
+func (fakeDirInfo) Size() int64 { return 0 }
+func (fakeDirInfo) Mode() os.FileMode { return 0755 }
diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go
new file mode 100644
index 0000000..3f71c4f
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/overlay.go
@@ -0,0 +1,103 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/build"
+ "io"
+ "io/ioutil"
+ "path/filepath"
+ "strconv"
+ "strings"
+)
+
+// OverlayContext overlays a build.Context with additional files from
+// a map. Files in the map take precedence over other files.
+//
+// In addition to plain string comparison, two file names are
+// considered equal if their base names match and their directory
+// components point at the same directory on the file system. That is,
+// symbolic links are followed for directories, but not files.
+//
+// A common use case for OverlayContext is to allow editors to pass in
+// a set of unsaved, modified files.
+//
+// Currently, only the Context.OpenFile function will respect the
+// overlay. This may change in the future.
+func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context {
+ // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir
+
+ rc := func(data []byte) (io.ReadCloser, error) {
+ return ioutil.NopCloser(bytes.NewBuffer(data)), nil
+ }
+
+ copy := *orig // make a copy
+ ctxt := &copy
+ ctxt.OpenFile = func(path string) (io.ReadCloser, error) {
+ // Fast path: names match exactly.
+ if content, ok := overlay[path]; ok {
+ return rc(content)
+ }
+
+ // Slow path: check for same file under a different
+ // alias, perhaps due to a symbolic link.
+ for filename, content := range overlay {
+ if sameFile(path, filename) {
+ return rc(content)
+ }
+ }
+
+ return OpenFile(orig, path)
+ }
+ return ctxt
+}
+
+// ParseOverlayArchive parses an archive containing Go files and their
+// contents. The result is intended to be used with OverlayContext.
+//
+//
+// Archive format
+//
+// The archive consists of a series of files. Each file consists of a
+// name, a decimal file size and the file contents, separated by
+// newlines. No newline follows after the file contents.
+func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) {
+ overlay := make(map[string][]byte)
+ r := bufio.NewReader(archive)
+ for {
+ // Read file name.
+ filename, err := r.ReadString('\n')
+ if err != nil {
+ if err == io.EOF {
+ break // OK
+ }
+ return nil, fmt.Errorf("reading archive file name: %v", err)
+ }
+ filename = filepath.Clean(strings.TrimSpace(filename))
+
+ // Read file size.
+ sz, err := r.ReadString('\n')
+ if err != nil {
+ return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err)
+ }
+ sz = strings.TrimSpace(sz)
+ size, err := strconv.ParseUint(sz, 10, 32)
+ if err != nil {
+ return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err)
+ }
+
+ // Read file content.
+ content := make([]byte, size)
+ if _, err := io.ReadFull(r, content); err != nil {
+ return nil, fmt.Errorf("reading archive file %s: %v", filename, err)
+ }
+ overlay[filename] = content
+ }
+
+ return overlay, nil
+}
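+
+// A minimal sketch combining the archive format above with OverlayContext
+// (the file name and contents are made up):
+//
+//	overlay, err := ParseOverlayArchive(strings.NewReader("a.go\n5\n12345"))
+//	if err == nil {
+//		ctxt := OverlayContext(&build.Default, overlay)
+//		_ = ctxt // OpenFile(ctxt, "a.go") now yields "12345"
+//	}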
diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay_test.go b/vendor/golang.org/x/tools/go/buildutil/overlay_test.go
new file mode 100644
index 0000000..92e2258
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/overlay_test.go
@@ -0,0 +1,70 @@
+package buildutil_test
+
+import (
+ "go/build"
+ "io/ioutil"
+ "reflect"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/buildutil"
+)
+
+func TestParseOverlayArchive(t *testing.T) {
+ var tt = []struct {
+ in string
+ out map[string][]byte
+ hasErr bool
+ }{
+ {
+ "a.go\n5\n12345",
+ map[string][]byte{"a.go": []byte("12345")},
+ false,
+ },
+ {
+ "a.go\n5\n1234",
+ nil,
+ true,
+ },
+ {
+ "a.go\n5\n12345b.go\n4\n1234",
+ map[string][]byte{"a.go": []byte("12345"), "b.go": []byte("1234")},
+ false,
+ },
+ }
+
+ for _, test := range tt {
+ got, err := buildutil.ParseOverlayArchive(strings.NewReader(test.in))
+ if err == nil && test.hasErr {
+ t.Errorf("expected error for %q", test.in)
+ }
+ if err != nil && !test.hasErr {
+ t.Errorf("unexpected error %v for %q", err, test.in)
+ }
+ if !reflect.DeepEqual(got, test.out) {
+ t.Errorf("got %#v, want %#v", got, test.out)
+ }
+ }
+}
+
+func TestOverlay(t *testing.T) {
+ ctx := &build.Default
+ ov := map[string][]byte{
+ "/somewhere/a.go": []byte("file contents"),
+ }
+ names := []string{"/somewhere/a.go", "/somewhere//a.go"}
+ ctx = buildutil.OverlayContext(ctx, ov)
+ for _, name := range names {
+ f, err := buildutil.OpenFile(ctx, name)
+ if err != nil {
+ t.Errorf("unexpected error %v", err)
+ }
+ b, err := ioutil.ReadAll(f)
+ if err != nil {
+ t.Errorf("unexpected error %v", err)
+ }
+ if got, expected := string(b), string(ov["/somewhere/a.go"]); got != expected {
+ t.Errorf("read %q, expected %q", got, expected)
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go
new file mode 100644
index 0000000..486606f
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/tags.go
@@ -0,0 +1,75 @@
+package buildutil
+
+// This logic was copied from stringsFlag from $GOROOT/src/cmd/go/build.go.
+
+import "fmt"
+
+const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " +
+ "For more information about build tags, see the description of " +
+ "build constraints in the documentation for the go/build package"
+
+// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses
+// a flag value in the same manner as go build's -tags flag and
+// populates a []string slice.
+//
+// See $GOROOT/src/go/build/doc.go for description of build tags.
+// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag.
+//
+// Example:
+// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
+type TagsFlag []string
+
+func (v *TagsFlag) Set(s string) error {
+ var err error
+ *v, err = splitQuotedFields(s)
+ if *v == nil {
+ *v = []string{}
+ }
+ return err
+}
+
+func (v *TagsFlag) Get() interface{} { return *v }
+
+func splitQuotedFields(s string) ([]string, error) {
+ // Split fields allowing '' or "" around elements.
+ // Quotes further inside the string do not count.
+ var f []string
+ for len(s) > 0 {
+ for len(s) > 0 && isSpaceByte(s[0]) {
+ s = s[1:]
+ }
+ if len(s) == 0 {
+ break
+ }
+ // Accept a quoted string. No unescaping inside.
+ if s[0] == '"' || s[0] == '\'' {
+ quote := s[0]
+ s = s[1:]
+ i := 0
+ for i < len(s) && s[i] != quote {
+ i++
+ }
+ if i >= len(s) {
+ return nil, fmt.Errorf("unterminated %c string", quote)
+ }
+ f = append(f, s[:i])
+ s = s[i+1:]
+ continue
+ }
+ i := 0
+ for i < len(s) && !isSpaceByte(s[i]) {
+ i++
+ }
+ f = append(f, s[:i])
+ s = s[i:]
+ }
+ return f, nil
+}
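+
+// For example, under the rules above the value
+//
+//	linux 'two words' "another tag"
+//
+// splits into the three fields "linux", "two words" and "another tag":
+// quotes delimit whole fields but are not interpreted further inside them.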
+
+func (v *TagsFlag) String() string {
+ return "<tagsFlag>"
+}
+
+func isSpaceByte(c byte) bool {
+ return c == ' ' || c == '\t' || c == '\n' || c == '\r'
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/tags_test.go b/vendor/golang.org/x/tools/go/buildutil/tags_test.go
new file mode 100644
index 0000000..0fc2618
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/tags_test.go
@@ -0,0 +1,28 @@
+package buildutil_test
+
+import (
+ "flag"
+ "go/build"
+ "reflect"
+ "testing"
+
+ "golang.org/x/tools/go/buildutil"
+)
+
+func TestTags(t *testing.T) {
+ f := flag.NewFlagSet("TestTags", flag.PanicOnError)
+ var ctxt build.Context
+ f.Var((*buildutil.TagsFlag)(&ctxt.BuildTags), "tags", buildutil.TagsFlagDoc)
+ f.Parse([]string{"-tags", ` 'one'"two" 'three "four"'`, "rest"})
+
+ // BuildTags
+ want := []string{"one", "two", "three \"four\""}
+ if !reflect.DeepEqual(ctxt.BuildTags, want) {
+ t.Errorf("BuildTags = %q, want %q", ctxt.BuildTags, want)
+ }
+
+ // Args()
+ if want := []string{"rest"}; !reflect.DeepEqual(f.Args(), want) {
+ t.Errorf("f.Args() = %q, want %q", f.Args(), want)
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go
new file mode 100644
index 0000000..fc923d7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/util.go
@@ -0,0 +1,212 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+import (
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+)
+
+// ParseFile behaves like parser.ParseFile,
+// but uses the build context's file system interface, if any.
+//
+// If file is not absolute (as defined by IsAbsPath), the (dir, file)
+// components are joined using JoinPath; dir must be absolute.
+//
+// The displayPath function, if provided, is used to transform the
+// filename that will be attached to the ASTs.
+//
+// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws.
+//
+func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) {
+ if !IsAbsPath(ctxt, file) {
+ file = JoinPath(ctxt, dir, file)
+ }
+ rd, err := OpenFile(ctxt, file)
+ if err != nil {
+ return nil, err
+ }
+ defer rd.Close() // ignore error
+ if displayPath != nil {
+ file = displayPath(file)
+ }
+ return parser.ParseFile(fset, file, rd, mode)
+}
+
+// ContainingPackage returns the package containing filename.
+//
+// If filename is not absolute, it is interpreted relative to working directory dir.
+// All I/O is via the build context's file system interface, if any.
+//
+// The '...Files []string' fields of the resulting build.Package are not
+// populated (build.FindOnly mode).
+//
+func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) {
+ if !IsAbsPath(ctxt, filename) {
+ filename = JoinPath(ctxt, dir, filename)
+ }
+
+ // We must not assume the file tree uses
+ // "/" always,
+ // `\` always,
+ // or os.PathSeparator (which varies by platform),
+ // but to make any progress, we are forced to assume that
+ // paths will not use `\` unless the PathSeparator
+ // is also `\`, thus we can rely on filepath.ToSlash for some sanity.
+
+ dirSlash := path.Dir(filepath.ToSlash(filename)) + "/"
+
+ // We assume that no source root (GOPATH[i] or GOROOT) contains any other.
+ for _, srcdir := range ctxt.SrcDirs() {
+ srcdirSlash := filepath.ToSlash(srcdir) + "/"
+ if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok {
+ return ctxt.Import(importPath, dir, build.FindOnly)
+ }
+ }
+
+ return nil, fmt.Errorf("can't find package containing %s", filename)
+}
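+
+// A minimal sketch (assumes the "runtime" and "path/filepath" packages):
+//
+//	file := filepath.Join(runtime.GOROOT(), "src", "fmt", "print.go")
+//	bp, err := ContainingPackage(&build.Default, ".", file)
+//
+// On success bp.ImportPath is "fmt", and only the directory fields of bp
+// are populated, as noted above (build.FindOnly).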
+
+// -- Effective methods of file system interface -------------------------
+
+// (go/build.Context defines these as methods, but does not export them.)
+
+// HasSubdir calls ctxt.HasSubdir (if not nil) or else uses
+// the local file system to answer the question.
+func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) {
+ if f := ctxt.HasSubdir; f != nil {
+ return f(root, dir)
+ }
+
+ // Try using paths we received.
+ if rel, ok = hasSubdir(root, dir); ok {
+ return
+ }
+
+ // Try expanding symlinks and comparing
+ // expanded against unexpanded and
+ // expanded against expanded.
+ rootSym, _ := filepath.EvalSymlinks(root)
+ dirSym, _ := filepath.EvalSymlinks(dir)
+
+ if rel, ok = hasSubdir(rootSym, dir); ok {
+ return
+ }
+ if rel, ok = hasSubdir(root, dirSym); ok {
+ return
+ }
+ return hasSubdir(rootSym, dirSym)
+}
+
+func hasSubdir(root, dir string) (rel string, ok bool) {
+ const sep = string(filepath.Separator)
+ root = filepath.Clean(root)
+ if !strings.HasSuffix(root, sep) {
+ root += sep
+ }
+
+ dir = filepath.Clean(dir)
+ if !strings.HasPrefix(dir, root) {
+ return "", false
+ }
+
+ return filepath.ToSlash(dir[len(root):]), true
+}
+
+// FileExists returns true if the specified file exists,
+// using the build context's file system interface.
+func FileExists(ctxt *build.Context, path string) bool {
+ if ctxt.OpenFile != nil {
+ r, err := ctxt.OpenFile(path)
+ if err != nil {
+ return false
+ }
+ r.Close() // ignore error
+ return true
+ }
+ _, err := os.Stat(path)
+ return err == nil
+}
+
+// OpenFile behaves like os.Open,
+// but uses the build context's file system interface, if any.
+func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) {
+ if ctxt.OpenFile != nil {
+ return ctxt.OpenFile(path)
+ }
+ return os.Open(path)
+}
+
+// IsAbsPath behaves like filepath.IsAbs,
+// but uses the build context's file system interface, if any.
+func IsAbsPath(ctxt *build.Context, path string) bool {
+ if ctxt.IsAbsPath != nil {
+ return ctxt.IsAbsPath(path)
+ }
+ return filepath.IsAbs(path)
+}
+
+// JoinPath behaves like filepath.Join,
+// but uses the build context's file system interface, if any.
+func JoinPath(ctxt *build.Context, path ...string) string {
+ if ctxt.JoinPath != nil {
+ return ctxt.JoinPath(path...)
+ }
+ return filepath.Join(path...)
+}
+
+// IsDir behaves like os.Stat plus IsDir,
+// but uses the build context's file system interface, if any.
+func IsDir(ctxt *build.Context, path string) bool {
+ if ctxt.IsDir != nil {
+ return ctxt.IsDir(path)
+ }
+ fi, err := os.Stat(path)
+ return err == nil && fi.IsDir()
+}
+
+// ReadDir behaves like ioutil.ReadDir,
+// but uses the build context's file system interface, if any.
+func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) {
+ if ctxt.ReadDir != nil {
+ return ctxt.ReadDir(path)
+ }
+ return ioutil.ReadDir(path)
+}
+
+// SplitPathList behaves like filepath.SplitList,
+// but uses the build context's file system interface, if any.
+func SplitPathList(ctxt *build.Context, s string) []string {
+ if ctxt.SplitPathList != nil {
+ return ctxt.SplitPathList(s)
+ }
+ return filepath.SplitList(s)
+}
+
+// sameFile returns true if x and y have the same basename and denote
+// the same file.
+//
+func sameFile(x, y string) bool {
+ if path.Clean(x) == path.Clean(y) {
+ return true
+ }
+ if filepath.Base(x) == filepath.Base(y) { // (optimisation)
+ if xi, err := os.Stat(x); err == nil {
+ if yi, err := os.Stat(y); err == nil {
+ return os.SameFile(xi, yi)
+ }
+ }
+ }
+ return false
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/util_test.go b/vendor/golang.org/x/tools/go/buildutil/util_test.go
new file mode 100644
index 0000000..72db317
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/util_test.go
@@ -0,0 +1,72 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil_test
+
+import (
+ "go/build"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "runtime"
+ "testing"
+
+ "golang.org/x/tools/go/buildutil"
+)
+
+func TestContainingPackage(t *testing.T) {
+ // unvirtualized:
+ goroot := runtime.GOROOT()
+ gopath := filepath.SplitList(os.Getenv("GOPATH"))[0]
+
+ type Test struct {
+ gopath, filename, wantPkg string
+ }
+
+ tests := []Test{
+ {gopath, goroot + "/src/fmt/print.go", "fmt"},
+ {gopath, goroot + "/src/encoding/json/foo.go", "encoding/json"},
+ {gopath, goroot + "/src/encoding/missing/foo.go", "(not found)"},
+ {gopath, gopath + "/src/golang.org/x/tools/go/buildutil/util_test.go",
+ "golang.org/x/tools/go/buildutil"},
+ }
+
+ if runtime.GOOS != "windows" && runtime.GOOS != "plan9" {
+ // Make a symlink to gopath for test
+ tmp, err := ioutil.TempDir(os.TempDir(), "go")
+ if err != nil {
+ t.Errorf("Unable to create a temporary directory in %s", os.TempDir())
+ }
+
+ defer os.RemoveAll(tmp)
+
+ // symlink between $GOPATH/src and /tmp/go/src
+ // in order to test all possible symlink cases
+ if err := os.Symlink(gopath+"/src", tmp+"/src"); err != nil {
+ t.Fatal(err)
+ }
+ tests = append(tests, []Test{
+ {gopath, tmp + "/src/golang.org/x/tools/go/buildutil/util_test.go", "golang.org/x/tools/go/buildutil"},
+ {tmp, gopath + "/src/golang.org/x/tools/go/buildutil/util_test.go", "golang.org/x/tools/go/buildutil"},
+ {tmp, tmp + "/src/golang.org/x/tools/go/buildutil/util_test.go", "golang.org/x/tools/go/buildutil"},
+ }...)
+ }
+
+ for _, test := range tests {
+ var got string
+ var buildContext = build.Default
+ buildContext.GOPATH = test.gopath
+ bp, err := buildutil.ContainingPackage(&buildContext, ".", test.filename)
+ if err != nil {
+ got = "(not found)"
+ } else {
+ got = bp.ImportPath
+ }
+ if got != test.wantPkg {
+ t.Errorf("ContainingPackage(%q) = %s, want %s", test.filename, got, test.wantPkg)
+ }
+ }
+
+ // TODO(adonovan): test on virtualized GOPATH too.
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/util_windows_test.go b/vendor/golang.org/x/tools/go/buildutil/util_windows_test.go
new file mode 100644
index 0000000..86fe9c7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/util_windows_test.go
@@ -0,0 +1,48 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil_test
+
+import (
+ "fmt"
+ "go/build"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/buildutil"
+)
+
+func testContainingPackageCaseFold(file, want string) error {
+ bp, err := buildutil.ContainingPackage(&build.Default, ".", file)
+ if err != nil {
+ return err
+ }
+ if got := bp.ImportPath; got != want {
+ return fmt.Errorf("ContainingPackage(%q) = %s, want %s", file, got, want)
+ }
+ return nil
+}
+
+func TestContainingPackageCaseFold(t *testing.T) {
+ path := filepath.Join(runtime.GOROOT(), `src\fmt\print.go`)
+ err := testContainingPackageCaseFold(path, "fmt")
+ if err != nil {
+ t.Error(err)
+ }
+ vol := filepath.VolumeName(path)
+ if len(vol) != 2 || vol[1] != ':' {
+ t.Fatalf("GOROOT path has unexpected volume name: %v", vol)
+ }
+ rest := path[len(vol):]
+ err = testContainingPackageCaseFold(strings.ToUpper(vol)+rest, "fmt")
+ if err != nil {
+ t.Error(err)
+ }
+ err = testContainingPackageCaseFold(strings.ToLower(vol)+rest, "fmt")
+ if err != nil {
+ t.Error(err)
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/callgraph/callgraph.go b/vendor/golang.org/x/tools/go/callgraph/callgraph.go
new file mode 100644
index 0000000..707a319
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/callgraph.go
@@ -0,0 +1,129 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+
+Package callgraph defines the call graph and various algorithms
+and utilities to operate on it.
+
+A call graph is a labelled directed graph whose nodes represent
+functions and whose edge labels represent syntactic function call
+sites. The presence of a labelled edge (caller, site, callee)
+indicates that caller may call callee at the specified call site.
+
+A call graph is a multigraph: it may contain multiple edges (caller,
+*, callee) connecting the same pair of nodes, so long as the edges
+differ by label; this occurs when one function calls another function
+from multiple call sites. Also, it may contain multiple edges
+(caller, site, *) that differ only by callee; this indicates a
+polymorphic call.
+
+A SOUND call graph is one that overapproximates the dynamic calling
+behaviors of the program in all possible executions. One call graph
+is more PRECISE than another if it is a smaller overapproximation of
+the dynamic behavior.
+
+All call graphs have a synthetic root node which is responsible for
+calling main() and init().
+
+Calls to built-in functions (e.g. panic, println) are not represented
+in the call graph; they are treated like built-in operators of the
+language.
+
+*/
+package callgraph // import "golang.org/x/tools/go/callgraph"
+
+// TODO(adonovan): add a function to eliminate wrappers from the
+// callgraph, preserving topology.
+// More generally, we could eliminate "uninteresting" nodes such as
+// nodes from packages we don't care about.
+
+import (
+ "fmt"
+ "go/token"
+
+ "golang.org/x/tools/go/ssa"
+)
+
+// A Graph represents a call graph.
+//
+// A graph may contain nodes that are not reachable from the root.
+// If the call graph is sound, such nodes indicate unreachable
+// functions.
+//
+type Graph struct {
+ Root *Node // the distinguished root node
+ Nodes map[*ssa.Function]*Node // all nodes by function
+}
+
+// New returns a new Graph with the specified root node.
+func New(root *ssa.Function) *Graph {
+ g := &Graph{Nodes: make(map[*ssa.Function]*Node)}
+ g.Root = g.CreateNode(root)
+ return g
+}
+
+// CreateNode returns the Node for fn, creating it if not present.
+func (g *Graph) CreateNode(fn *ssa.Function) *Node {
+ n, ok := g.Nodes[fn]
+ if !ok {
+ n = &Node{Func: fn, ID: len(g.Nodes)}
+ g.Nodes[fn] = n
+ }
+ return n
+}
+
+// A Node represents a node in a call graph.
+type Node struct {
+ Func *ssa.Function // the function this node represents
+ ID int // 0-based sequence number
+ In []*Edge // unordered set of incoming call edges (n.In[*].Callee == n)
+ Out []*Edge // unordered set of outgoing call edges (n.Out[*].Caller == n)
+}
+
+func (n *Node) String() string {
+ return fmt.Sprintf("n%d:%s", n.ID, n.Func)
+}
+
+// An Edge represents an edge in the call graph.
+//
+// Site is nil for edges originating in synthetic or intrinsic
+// functions, e.g. reflect.Call or the root of the call graph.
+type Edge struct {
+ Caller *Node
+ Site ssa.CallInstruction
+ Callee *Node
+}
+
+func (e Edge) String() string {
+ return fmt.Sprintf("%s --> %s", e.Caller, e.Callee)
+}
+
+func (e Edge) Description() string {
+ var prefix string
+ switch e.Site.(type) {
+ case nil:
+ return "synthetic call"
+ case *ssa.Go:
+ prefix = "concurrent "
+ case *ssa.Defer:
+ prefix = "deferred "
+ }
+ return prefix + e.Site.Common().Description()
+}
+
+func (e Edge) Pos() token.Pos {
+ if e.Site == nil {
+ return token.NoPos
+ }
+ return e.Site.Pos()
+}
+
+// AddEdge adds the edge (caller, site, callee) to the call graph.
+// Elimination of duplicate edges is the caller's responsibility.
+func AddEdge(caller *Node, site ssa.CallInstruction, callee *Node) {
+ e := &Edge{caller, site, callee}
+ callee.In = append(callee.In, e)
+ caller.Out = append(caller.Out, e)
+}
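
The Graph, Node, and Edge types above are all that is needed to inspect a call graph once one has been built. A minimal sketch of walking a graph produced by one of the builders (for example cha.CallGraph, added later in this patch); dumpGraph is a hypothetical helper name:

package example

import (
	"fmt"

	"golang.org/x/tools/go/callgraph"
)

// dumpGraph prints every function node and its outgoing call edges.
func dumpGraph(g *callgraph.Graph) {
	for _, n := range g.Nodes {
		if n.Func == nil {
			continue // skip the synthetic root node
		}
		fmt.Printf("%s (%d outgoing edges)\n", n, len(n.Out))
		for _, e := range n.Out {
			fmt.Printf("  %s (%s)\n", e, e.Description())
		}
	}
}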
diff --git a/vendor/golang.org/x/tools/go/callgraph/cha/cha.go b/vendor/golang.org/x/tools/go/callgraph/cha/cha.go
new file mode 100644
index 0000000..215ff17
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/cha/cha.go
@@ -0,0 +1,139 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cha computes the call graph of a Go program using the Class
+// Hierarchy Analysis (CHA) algorithm.
+//
+// CHA was first described in "Optimization of Object-Oriented Programs
+// Using Static Class Hierarchy Analysis", Jeffrey Dean, David Grove,
+// and Craig Chambers, ECOOP'95.
+//
+// CHA is related to RTA (see go/callgraph/rta); the difference is that
+// CHA conservatively computes the entire "implements" relation between
+// interfaces and concrete types ahead of time, whereas RTA uses dynamic
+// programming to construct it on the fly as it encounters new functions
+// reachable from main. CHA may thus include spurious call edges for
+// types that haven't been instantiated yet, or types that are never
+// instantiated.
+//
+// Since CHA conservatively assumes that all functions are address-taken
+// and all concrete types are put into interfaces, it is sound to run on
+// partial programs, such as libraries without a main or test function.
+//
+package cha // import "golang.org/x/tools/go/callgraph/cha"
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/go/callgraph"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+// CallGraph computes the call graph of the specified program using the
+// Class Hierarchy Analysis algorithm.
+//
+func CallGraph(prog *ssa.Program) *callgraph.Graph {
+ cg := callgraph.New(nil) // TODO(adonovan) eliminate concept of rooted callgraph
+
+ allFuncs := ssautil.AllFunctions(prog)
+
+ // funcsBySig contains all functions, keyed by signature. It is
+ // the effective set of address-taken functions used to resolve
+ // a dynamic call of a particular signature.
+ var funcsBySig typeutil.Map // value is []*ssa.Function
+
+ // methodsByName contains all methods,
+ // grouped by name for efficient lookup.
+ // (methodsById would be better but not every SSA method has a go/types ID.)
+ methodsByName := make(map[string][]*ssa.Function)
+
+ // An imethod represents an interface method I.m.
+ // (There's no go/types object for it;
+ // a *types.Func may be shared by many interfaces due to interface embedding.)
+ type imethod struct {
+ I *types.Interface
+ id string
+ }
+ // methodsMemo records, for every abstract method call I.m on
+ // interface type I, the set of concrete methods C.m of all
+ // types C that satisfy interface I.
+ //
+ // Abstract methods may be shared by several interfaces,
+ // hence we must pass I explicitly, not guess from m.
+ //
+ // methodsMemo is just a cache, so it needn't be a typeutil.Map.
+ methodsMemo := make(map[imethod][]*ssa.Function)
+ lookupMethods := func(I *types.Interface, m *types.Func) []*ssa.Function {
+ id := m.Id()
+ methods, ok := methodsMemo[imethod{I, id}]
+ if !ok {
+ for _, f := range methodsByName[m.Name()] {
+ C := f.Signature.Recv().Type() // named or *named
+ if types.Implements(C, I) {
+ methods = append(methods, f)
+ }
+ }
+ methodsMemo[imethod{I, id}] = methods
+ }
+ return methods
+ }
+
+ for f := range allFuncs {
+ if f.Signature.Recv() == nil {
+ // Package initializers can never be address-taken.
+ if f.Name() == "init" && f.Synthetic == "package initializer" {
+ continue
+ }
+ funcs, _ := funcsBySig.At(f.Signature).([]*ssa.Function)
+ funcs = append(funcs, f)
+ funcsBySig.Set(f.Signature, funcs)
+ } else {
+ methodsByName[f.Name()] = append(methodsByName[f.Name()], f)
+ }
+ }
+
+ addEdge := func(fnode *callgraph.Node, site ssa.CallInstruction, g *ssa.Function) {
+ gnode := cg.CreateNode(g)
+ callgraph.AddEdge(fnode, site, gnode)
+ }
+
+ addEdges := func(fnode *callgraph.Node, site ssa.CallInstruction, callees []*ssa.Function) {
+ // Because every call to a highly polymorphic and
+ // frequently used abstract method such as
+ // (io.Writer).Write is assumed to call every concrete
+ // Write method in the program, the call graph can
+ // contain a lot of duplication.
+ //
+ // TODO(adonovan): opt: consider factoring the callgraph
+ // API so that the Callers component of each edge is a
+ // slice of nodes, not a singleton.
+ for _, g := range callees {
+ addEdge(fnode, site, g)
+ }
+ }
+
+ for f := range allFuncs {
+ fnode := cg.CreateNode(f)
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ if site, ok := instr.(ssa.CallInstruction); ok {
+ call := site.Common()
+ if call.IsInvoke() {
+ tiface := call.Value.Type().Underlying().(*types.Interface)
+ addEdges(fnode, site, lookupMethods(tiface, call.Method))
+ } else if g := call.StaticCallee(); g != nil {
+ addEdge(fnode, site, g)
+ } else if _, ok := call.Value.(*ssa.Builtin); !ok {
+ callees, _ := funcsBySig.At(call.Signature()).([]*ssa.Function)
+ addEdges(fnode, site, callees)
+ }
+ }
+ }
+ }
+ }
+
+ return cg
+}
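
A minimal end-to-end sketch of using this package, closely following cha_test.go below: parse a small program with go/loader, build SSA form with ssautil, run CHA, and print only the dynamic call edges. The program in src is hypothetical, chosen only to produce one interface call:

package main

import (
	"fmt"
	"log"
	"strings"

	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/callgraph/cha"
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa/ssautil"
)

const src = `package main

type I interface{ F() }

type T int

func (T) F() {}

func main() {
	var i I = T(0)
	i.F() // dynamic (interface) call
}
`

func main() {
	var conf loader.Config
	f, err := conf.ParseFile("main.go", src)
	if err != nil {
		log.Fatal(err)
	}
	conf.CreateFromFiles("main", f)
	iprog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}

	// Build SSA form for the whole program, then run CHA over it.
	prog := ssautil.CreateProgram(iprog, 0)
	prog.Build()
	cg := cha.CallGraph(prog)

	// Report only the dynamic edges, as the tests below do.
	callgraph.GraphVisitEdges(cg, func(e *callgraph.Edge) error {
		if strings.Contains(e.Description(), "dynamic") {
			fmt.Println(e)
		}
		return nil
	})
}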
diff --git a/vendor/golang.org/x/tools/go/callgraph/cha/cha_test.go b/vendor/golang.org/x/tools/go/callgraph/cha/cha_test.go
new file mode 100644
index 0000000..cb2d585
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/cha/cha_test.go
@@ -0,0 +1,111 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// No testdata on Android.
+
+// +build !android
+
+package cha_test
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "io/ioutil"
+ "sort"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/callgraph"
+ "golang.org/x/tools/go/callgraph/cha"
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+var inputs = []string{
+ "testdata/func.go",
+ "testdata/iface.go",
+ "testdata/recv.go",
+ "testdata/issue23925.go",
+}
+
+func expectation(f *ast.File) (string, token.Pos) {
+ for _, c := range f.Comments {
+ text := strings.TrimSpace(c.Text())
+ if t := strings.TrimPrefix(text, "WANT:\n"); t != text {
+ return t, c.Pos()
+ }
+ }
+ return "", token.NoPos
+}
+
+// TestCHA runs CHA on each file in inputs, prints the dynamic edges of
+// the call graph, and compares it with the golden results embedded in
+// the WANT comment at the end of the file.
+//
+func TestCHA(t *testing.T) {
+ for _, filename := range inputs {
+ content, err := ioutil.ReadFile(filename)
+ if err != nil {
+ t.Errorf("couldn't read file '%s': %s", filename, err)
+ continue
+ }
+
+ conf := loader.Config{
+ ParserMode: parser.ParseComments,
+ }
+ f, err := conf.ParseFile(filename, content)
+ if err != nil {
+ t.Error(err)
+ continue
+ }
+
+ want, pos := expectation(f)
+ if pos == token.NoPos {
+ t.Errorf("No WANT: comment in %s", filename)
+ continue
+ }
+
+ conf.CreateFromFiles("main", f)
+ iprog, err := conf.Load()
+ if err != nil {
+ t.Error(err)
+ continue
+ }
+
+ prog := ssautil.CreateProgram(iprog, 0)
+ mainPkg := prog.Package(iprog.Created[0].Pkg)
+ prog.Build()
+
+ cg := cha.CallGraph(prog)
+
+ if got := printGraph(cg, mainPkg.Pkg); got != want {
+ t.Errorf("%s: got:\n%s\nwant:\n%s",
+ prog.Fset.Position(pos), got, want)
+ }
+ }
+}
+
+func printGraph(cg *callgraph.Graph, from *types.Package) string {
+ var edges []string
+ callgraph.GraphVisitEdges(cg, func(e *callgraph.Edge) error {
+ if strings.Contains(e.Description(), "dynamic") {
+ edges = append(edges, fmt.Sprintf("%s --> %s",
+ e.Caller.Func.RelString(from),
+ e.Callee.Func.RelString(from)))
+ }
+ return nil
+ })
+ sort.Strings(edges)
+
+ var buf bytes.Buffer
+ buf.WriteString("Dynamic calls\n")
+ for _, edge := range edges {
+ fmt.Fprintf(&buf, " %s\n", edge)
+ }
+ return strings.TrimSpace(buf.String())
+}
diff --git a/vendor/golang.org/x/tools/go/callgraph/cha/testdata/func.go b/vendor/golang.org/x/tools/go/callgraph/cha/testdata/func.go
new file mode 100644
index 0000000..ad483f1
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/cha/testdata/func.go
@@ -0,0 +1,23 @@
+//+build ignore
+
+package main
+
+// Test of dynamic function calls; no interfaces.
+
+func A(int) {}
+
+var (
+ B = func(int) {}
+ C = func(int) {}
+)
+
+func f() {
+ pfn := B
+ pfn(0) // calls A, B, C, even though A is not even address-taken
+}
+
+// WANT:
+// Dynamic calls
+// f --> A
+// f --> init$1
+// f --> init$2
diff --git a/vendor/golang.org/x/tools/go/callgraph/cha/testdata/iface.go b/vendor/golang.org/x/tools/go/callgraph/cha/testdata/iface.go
new file mode 100644
index 0000000..1622ec1
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/cha/testdata/iface.go
@@ -0,0 +1,65 @@
+//+build ignore
+
+package main
+
+// Test of interface calls. None of the concrete types are ever
+// instantiated or converted to interfaces.
+
+type I interface {
+ f()
+}
+
+type J interface {
+ f()
+ g()
+}
+
+type C int // implements I
+
+func (*C) f()
+
+type D int // implements I and J
+
+func (*D) f()
+func (*D) g()
+
+func one(i I, j J) {
+ i.f() // calls *C and *D
+}
+
+func two(i I, j J) {
+ j.f() // calls *D (but not *C, even though it defines method f)
+}
+
+func three(i I, j J) {
+ j.g() // calls *D
+}
+
+func four(i I, j J) {
+ Jf := J.f
+ if unknown {
+ Jf = nil // suppress SSA constant propagation
+ }
+ Jf(nil) // calls *D
+}
+
+func five(i I, j J) {
+ jf := j.f
+ if unknown {
+ jf = nil // suppress SSA constant propagation
+ }
+ jf() // calls *D
+}
+
+var unknown bool
+
+// WANT:
+// Dynamic calls
+// (J).f$bound --> (*D).f
+// (J).f$thunk --> (*D).f
+// five --> (J).f$bound
+// four --> (J).f$thunk
+// one --> (*C).f
+// one --> (*D).f
+// three --> (*D).g
+// two --> (*D).f
diff --git a/vendor/golang.org/x/tools/go/callgraph/cha/testdata/issue23925.go b/vendor/golang.org/x/tools/go/callgraph/cha/testdata/issue23925.go
new file mode 100644
index 0000000..59eaf18
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/cha/testdata/issue23925.go
@@ -0,0 +1,38 @@
+package main
+
+// Regression test for https://github.com/golang/go/issues/23925
+
+type stringFlagImpl string
+
+func (*stringFlagImpl) Set(s string) error { return nil }
+
+type boolFlagImpl bool
+
+func (*boolFlagImpl) Set(s string) error { return nil }
+func (*boolFlagImpl) extra() {}
+
+// A copy of flag.boolFlag interface, without a dependency.
+// Must appear first, so that it becomes the owner of the Set methods.
+type boolFlag interface {
+ flagValue
+ extra()
+}
+
+// A copy of flag.Value, without adding a dependency.
+type flagValue interface {
+ Set(string) error
+}
+
+func main() {
+ var x flagValue = new(stringFlagImpl)
+ x.Set("")
+
+ var y boolFlag = new(boolFlagImpl)
+ y.Set("")
+}
+
+// WANT:
+// Dynamic calls
+// main --> (*boolFlagImpl).Set
+// main --> (*boolFlagImpl).Set
+// main --> (*stringFlagImpl).Set
diff --git a/vendor/golang.org/x/tools/go/callgraph/cha/testdata/recv.go b/vendor/golang.org/x/tools/go/callgraph/cha/testdata/recv.go
new file mode 100644
index 0000000..5ba48e9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/cha/testdata/recv.go
@@ -0,0 +1,37 @@
+//+build ignore
+
+package main
+
+type I interface {
+ f()
+}
+
+type J interface {
+ g()
+}
+
+type C int // C and *C implement I; *C implements J
+
+func (C) f()
+func (*C) g()
+
+type D int // *D implements I and J
+
+func (*D) f()
+func (*D) g()
+
+func f(i I) {
+ i.f() // calls C, *C, *D
+}
+
+func g(j J) {
+ j.g() // calls *C, *D
+}
+
+// WANT:
+// Dynamic calls
+// f --> (*C).f
+// f --> (*D).f
+// f --> (C).f
+// g --> (*C).g
+// g --> (*D).g
diff --git a/vendor/golang.org/x/tools/go/callgraph/util.go b/vendor/golang.org/x/tools/go/callgraph/util.go
new file mode 100644
index 0000000..a8f8903
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/util.go
@@ -0,0 +1,181 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package callgraph
+
+import "golang.org/x/tools/go/ssa"
+
+// This file provides various utilities over call graphs, such as
+// visitation and path search.
+
+// CalleesOf returns a new set containing all direct callees of the
+// caller node.
+//
+func CalleesOf(caller *Node) map[*Node]bool {
+ callees := make(map[*Node]bool)
+ for _, e := range caller.Out {
+ callees[e.Callee] = true
+ }
+ return callees
+}
+
+// GraphVisitEdges visits all the edges in graph g in depth-first order.
+// The edge function is called for each edge in postorder. If it
+// returns non-nil, visitation stops and GraphVisitEdges returns that
+// value.
+//
+func GraphVisitEdges(g *Graph, edge func(*Edge) error) error {
+ seen := make(map[*Node]bool)
+ var visit func(n *Node) error
+ visit = func(n *Node) error {
+ if !seen[n] {
+ seen[n] = true
+ for _, e := range n.Out {
+ if err := visit(e.Callee); err != nil {
+ return err
+ }
+ if err := edge(e); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+ }
+ for _, n := range g.Nodes {
+ if err := visit(n); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// PathSearch finds an arbitrary path starting at node start and
+// ending at some node for which isEnd() returns true. On success,
+// PathSearch returns the path as an ordered list of edges; on
+// failure, it returns nil.
+//
+func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge {
+ stack := make([]*Edge, 0, 32)
+ seen := make(map[*Node]bool)
+ var search func(n *Node) []*Edge
+ search = func(n *Node) []*Edge {
+ if !seen[n] {
+ seen[n] = true
+ if isEnd(n) {
+ return stack
+ }
+ for _, e := range n.Out {
+ stack = append(stack, e) // push
+ if found := search(e.Callee); found != nil {
+ return found
+ }
+ stack = stack[:len(stack)-1] // pop
+ }
+ }
+ return nil
+ }
+ return search(start)
+}
+
+// DeleteSyntheticNodes removes from call graph g all nodes for
+// synthetic functions (except g.Root and package initializers),
+// preserving the topology. In effect, calls to synthetic wrappers
+// are "inlined".
+//
+func (g *Graph) DeleteSyntheticNodes() {
+ // Measurements on the standard library and go.tools show that
+ // resulting graph has ~15% fewer nodes and 4-8% fewer edges
+ // than the input.
+ //
+ // Inlining a wrapper of in-degree m, out-degree n adds m*n
+ // and removes m+n edges. Since most wrappers are monomorphic
+ // (n=1) this results in a slight reduction. Polymorphic
+ // wrappers (n>1), e.g. from embedding an interface value
+ // inside a struct to satisfy some interface, cause an
+ // increase in the graph, but they seem to be uncommon.
+
+ // Hash all existing edges to avoid creating duplicates.
+ edges := make(map[Edge]bool)
+ for _, cgn := range g.Nodes {
+ for _, e := range cgn.Out {
+ edges[*e] = true
+ }
+ }
+ for fn, cgn := range g.Nodes {
+ if cgn == g.Root || fn.Synthetic == "" || isInit(cgn.Func) {
+ continue // keep
+ }
+ for _, eIn := range cgn.In {
+ for _, eOut := range cgn.Out {
+ newEdge := Edge{eIn.Caller, eIn.Site, eOut.Callee}
+ if edges[newEdge] {
+ continue // don't add duplicate
+ }
+ AddEdge(eIn.Caller, eIn.Site, eOut.Callee)
+ edges[newEdge] = true
+ }
+ }
+ g.DeleteNode(cgn)
+ }
+}
+
+func isInit(fn *ssa.Function) bool {
+ return fn.Pkg != nil && fn.Pkg.Func("init") == fn
+}
+
+// DeleteNode removes node n and its edges from the graph g.
+// (NB: not efficient for batch deletion.)
+func (g *Graph) DeleteNode(n *Node) {
+ n.deleteIns()
+ n.deleteOuts()
+ delete(g.Nodes, n.Func)
+}
+
+// deleteIns deletes all incoming edges to n.
+func (n *Node) deleteIns() {
+ for _, e := range n.In {
+ removeOutEdge(e)
+ }
+ n.In = nil
+}
+
+// deleteOuts deletes all outgoing edges from n.
+func (n *Node) deleteOuts() {
+ for _, e := range n.Out {
+ removeInEdge(e)
+ }
+ n.Out = nil
+}
+
+// removeOutEdge removes edge.Caller's outgoing edge 'edge'.
+func removeOutEdge(edge *Edge) {
+ caller := edge.Caller
+ n := len(caller.Out)
+ for i, e := range caller.Out {
+ if e == edge {
+ // Replace it with the final element and shrink the slice.
+ caller.Out[i] = caller.Out[n-1]
+ caller.Out[n-1] = nil // aid GC
+ caller.Out = caller.Out[:n-1]
+ return
+ }
+ }
+ panic("edge not found: " + edge.String())
+}
+
+// removeInEdge removes edge.Callee's incoming edge 'edge'.
+func removeInEdge(edge *Edge) {
+ caller := edge.Callee
+ n := len(caller.In)
+ for i, e := range caller.In {
+ if e == edge {
+ // Replace it with the final element and shrink the slice.
+ caller.In[i] = caller.In[n-1]
+ caller.In[n-1] = nil // aid GC
+ caller.In = caller.In[:n-1]
+ return
+ }
+ }
+ panic("edge not found: " + edge.String())
+}
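
A sketch of how these utilities compose: strip synthetic wrappers, then search for a call path from a given function to any function with a particular name. reportPathTo and its parameters are hypothetical names; cg would come from a builder such as cha.CallGraph, and from would be an *ssa.Function of interest (for example a program's main function):

package example

import (
	"fmt"

	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/ssa"
)

func reportPathTo(cg *callgraph.Graph, from *ssa.Function, target string) {
	// "Inline" synthetic wrappers, preserving the graph's topology.
	cg.DeleteSyntheticNodes()

	start, ok := cg.Nodes[from]
	if !ok {
		fmt.Printf("no node for %s\n", from)
		return
	}
	fmt.Printf("%s has %d direct callees\n", start, len(callgraph.CalleesOf(start)))

	// Find an arbitrary path ending at any function named target.
	path := callgraph.PathSearch(start, func(n *callgraph.Node) bool {
		return n.Func != nil && n.Func.Name() == target
	})
	if path == nil {
		fmt.Printf("no path from %s to %s\n", from, target)
		return
	}
	for _, e := range path {
		fmt.Println(e) // edges in order, from the start node onward
	}
}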
diff --git a/vendor/golang.org/x/tools/go/loader/cgo.go b/vendor/golang.org/x/tools/go/loader/cgo.go
new file mode 100644
index 0000000..72c6f50
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/cgo.go
@@ -0,0 +1,207 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package loader
+
+// This file handles cgo preprocessing of files containing `import "C"`.
+//
+// DESIGN
+//
+// The approach taken is to run the cgo processor on the package's
+// CgoFiles and parse the output, faking the filenames of the
+// resulting ASTs so that the synthetic file containing the C types is
+// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
+// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
+// not the names of the actual temporary files.
+//
+// The advantage of this approach is its fidelity to 'go build'. The
+// downside is that the token.Position.Offset for each AST node is
+// incorrect, being an offset within the temporary file. Line numbers
+// should still be correct because of the //line comments.
+//
+// The logic of this file is mostly plundered from the 'go build'
+// tool, which also invokes the cgo preprocessor.
+//
+//
+// REJECTED ALTERNATIVE
+//
+// An alternative approach that we explored is to extend go/types'
+// Importer mechanism to provide the identity of the importing package
+// so that each time `import "C"` appears it resolves to a different
+// synthetic package containing just the objects needed in that case.
+// The loader would invoke cgo but parse only the cgo_types.go file
+// defining the package-level objects, discarding the other files
+// resulting from preprocessing.
+//
+// The benefit of this approach would have been that source-level
+// syntax information would correspond exactly to the original cgo
+// file, with no preprocessing involved, making source tools like
+// godoc, guru, and eg happy. However, the approach was rejected
+// due to the additional complexity it would impose on go/types. (It
+// made for a beautiful demo, though.)
+//
+// cgo files, despite their *.go extension, are not legal Go source
+// files per the specification since they may refer to unexported
+// members of package "C" such as C.int. Also, a function such as
+// C.getpwent has in effect two types, one matching its C type and one
+// which additionally returns (errno C.int). The cgo preprocessor
+// uses name mangling to distinguish these two functions in the
+// processed code, but go/types would need to duplicate this logic in
+// its handling of function calls, analogous to the treatment of map
+// lookups in which y=m[k] and y,ok=m[k] are both legal.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strings"
+)
+
+// processCgoFiles invokes the cgo preprocessor on bp.CgoFiles, parses
+// the output and returns the resulting ASTs.
+//
+func processCgoFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
+ tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
+ if err != nil {
+ return nil, err
+ }
+ defer os.RemoveAll(tmpdir)
+
+ pkgdir := bp.Dir
+ if DisplayPath != nil {
+ pkgdir = DisplayPath(pkgdir)
+ }
+
+ cgoFiles, cgoDisplayFiles, err := runCgo(bp, pkgdir, tmpdir)
+ if err != nil {
+ return nil, err
+ }
+ var files []*ast.File
+ for i := range cgoFiles {
+ rd, err := os.Open(cgoFiles[i])
+ if err != nil {
+ return nil, err
+ }
+ display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
+ f, err := parser.ParseFile(fset, display, rd, mode)
+ rd.Close()
+ if err != nil {
+ return nil, err
+ }
+ files = append(files, f)
+ }
+ return files, nil
+}
+
+var cgoRe = regexp.MustCompile(`[/\\:]`)
+
+// runCgo invokes the cgo preprocessor on bp.CgoFiles and returns two
+// lists of files: the resulting processed files (in temporary
+// directory tmpdir) and the corresponding names of the unprocessed files.
+//
+// runCgo is adapted from (*builder).cgo in
+// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
+// Objective C, CGOPKGPATH, CGO_FLAGS.
+//
+func runCgo(bp *build.Package, pkgdir, tmpdir string) (files, displayFiles []string, err error) {
+ cgoCPPFLAGS, _, _, _ := cflags(bp, true)
+ _, cgoexeCFLAGS, _, _ := cflags(bp, false)
+
+ if len(bp.CgoPkgConfig) > 0 {
+ pcCFLAGS, err := pkgConfigFlags(bp)
+ if err != nil {
+ return nil, nil, err
+ }
+ cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
+ }
+
+ // Allows including _cgo_export.h from .[ch] files in the package.
+ cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)
+
+ // _cgo_gotypes.go (displayed "C") contains the type definitions.
+ files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
+ displayFiles = append(displayFiles, "C")
+ for _, fn := range bp.CgoFiles {
+ // "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
+ f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
+ files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
+ displayFiles = append(displayFiles, fn)
+ }
+
+ var cgoflags []string
+ if bp.Goroot && bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_runtime_cgo=false")
+ }
+ if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_syscall=false")
+ }
+
+ args := stringList(
+ "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
+ cgoCPPFLAGS, cgoexeCFLAGS, bp.CgoFiles,
+ )
+ if false {
+ log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
+ }
+ cmd := exec.Command(args[0], args[1:]...)
+ cmd.Dir = pkgdir
+ cmd.Stdout = os.Stderr
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
+ }
+
+ return files, displayFiles, nil
+}
+
+// -- unmodified from 'go build' ---------------------------------------
+
+// Return the flags to use when invoking the C or C++ compilers, or cgo.
+func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
+ var defaults string
+ if def {
+ defaults = "-g -O2"
+ }
+
+ cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
+ cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
+ cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
+ ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
+ return
+}
+
+// envList returns the value of the given environment variable broken
+// into fields, using the default value when the variable is empty.
+func envList(key, def string) []string {
+ v := os.Getenv(key)
+ if v == "" {
+ v = def
+ }
+ return strings.Fields(v)
+}
+
+// stringList's arguments should be a sequence of string or []string values.
+// stringList flattens them into a single []string.
+func stringList(args ...interface{}) []string {
+ var x []string
+ for _, arg := range args {
+ switch arg := arg.(type) {
+ case []string:
+ x = append(x, arg...)
+ case string:
+ x = append(x, arg)
+ default:
+ panic("stringList: invalid argument")
+ }
+ }
+ return x
+}
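
The preprocessing above is skipped entirely when cgo is disabled in the build context. A minimal sketch of opting out from a client, using the Config.Build field documented in loader.go later in this patch; os/user is just an arbitrary example of a package that uses cgo when it is enabled:

package main

import (
	"fmt"
	"go/build"
	"log"

	"golang.org/x/tools/go/loader"
)

func main() {
	// Copy the default context and turn cgo off, so files that
	// import "C" are excluded rather than preprocessed.
	ctxt := build.Default
	ctxt.CgoEnabled = false

	var conf loader.Config
	conf.Build = &ctxt
	conf.Import("os/user")
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(prog.Package("os/user").Pkg.Path())
}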
diff --git a/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go
new file mode 100644
index 0000000..de57422
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go
@@ -0,0 +1,39 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package loader
+
+import (
+ "errors"
+ "fmt"
+ "go/build"
+ "os/exec"
+ "strings"
+)
+
+// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
+func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
+ cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
+ out, err := cmd.CombinedOutput()
+ if err != nil {
+ s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
+ if len(out) > 0 {
+ s = fmt.Sprintf("%s: %s", s, out)
+ }
+ return nil, errors.New(s)
+ }
+ if len(out) > 0 {
+ flags = strings.Fields(string(out))
+ }
+ return
+}
+
+// pkgConfigFlags calls pkg-config if needed and returns the cflags
+// needed to build the package.
+func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
+ if len(p.CgoPkgConfig) == 0 {
+ return nil, nil
+ }
+ return pkgConfig("--cflags", p.CgoPkgConfig)
+}
diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go
new file mode 100644
index 0000000..9b51c9e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/doc.go
@@ -0,0 +1,205 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package loader loads a complete Go program from source code, parsing
+// and type-checking the initial packages plus their transitive closure
+// of dependencies. The ASTs and the derived facts are retained for
+// later use.
+//
+// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE.
+//
+// The package defines two primary types: Config, which specifies a
+// set of initial packages to load and various other options; and
+// Program, which is the result of successfully loading the packages
+// specified by a configuration.
+//
+// The configuration can be set directly, but *Config provides various
+// convenience methods to simplify the common cases, each of which can
+// be called any number of times. Finally, these are followed by a
+// call to Load() to actually load and type-check the program.
+//
+// var conf loader.Config
+//
+// // Use the command-line arguments to specify
+// // a set of initial packages to load from source.
+// // See FromArgsUsage for help.
+// rest, err := conf.FromArgs(os.Args[1:], wantTests)
+//
+// // Parse the specified files and create an ad hoc package with path "foo".
+// // All files must have the same 'package' declaration.
+// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
+//
+// // Create an ad hoc package with path "foo" from
+// // the specified already-parsed files.
+// // All ASTs must have the same 'package' declaration.
+// conf.CreateFromFiles("foo", parsedFiles)
+//
+// // Add "runtime" to the set of packages to be loaded.
+// conf.Import("runtime")
+//
+// // Adds "fmt" and "fmt_test" to the set of packages
+// // to be loaded. "fmt" will include *_test.go files.
+// conf.ImportWithTests("fmt")
+//
+// // Finally, load all the packages specified by the configuration.
+// prog, err := conf.Load()
+//
+// See examples_test.go for examples of API usage.
+//
+//
+// CONCEPTS AND TERMINOLOGY
+//
+// The WORKSPACE is the set of packages accessible to the loader. The
+// workspace is defined by Config.Build, a *build.Context. The
+// default context treats subdirectories of $GOROOT and $GOPATH as
+// packages, but this behavior may be overridden.
+//
+// An AD HOC package is one specified as a set of source files on the
+// command line. In the simplest case, it may consist of a single file
+// such as $GOROOT/src/net/http/triv.go.
+//
+// EXTERNAL TEST packages are those comprised of a set of *_test.go
+// files all with the same 'package foo_test' declaration, all in the
+// same directory. (go/build.Package calls these files XTestFiles.)
+//
+// An IMPORTABLE package is one that can be referred to by some import
+// spec. Every importable package is uniquely identified by its
+// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json",
+// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path
+// typically denotes a subdirectory of the workspace.
+//
+// An import declaration uses an IMPORT PATH to refer to a package.
+// Most import declarations use the package path as the import path.
+//
+// Due to VENDORING (https://golang.org/s/go15vendor), the
+// interpretation of an import path may depend on the directory in which
+// it appears. To resolve an import path to a package path, go/build
+// must search the enclosing directories for a subdirectory named
+// "vendor".
+//
+// ad hoc packages and external test packages are NON-IMPORTABLE. The
+// path of an ad hoc package is inferred from the package
+// declarations of its files and is therefore not a unique package key.
+// For example, Config.CreatePkgs may specify two initial ad hoc
+// packages, both with path "main".
+//
+// An AUGMENTED package is an importable package P plus all the
+// *_test.go files with the same 'package foo' declaration as P.
+// (go/build.Package calls these files TestFiles.)
+//
+// The INITIAL packages are those specified in the configuration. A
+// DEPENDENCY is a package loaded to satisfy an import in an initial
+// package or another dependency.
+//
+package loader
+
+// IMPLEMENTATION NOTES
+//
+// 'go test', in-package test files, and import cycles
+// ---------------------------------------------------
+//
+// An external test package may depend upon members of the augmented
+// package that are not in the unaugmented package, such as functions
+// that expose internals. (See bufio/export_test.go for an example.)
+// So, the loader must ensure that for each external test package
+// it loads, it also augments the corresponding non-test package.
+//
+// The import graph over n unaugmented packages must be acyclic; the
+// import graph over n-1 unaugmented packages plus one augmented
+// package must also be acyclic. ('go test' relies on this.) But the
+// import graph over n augmented packages may contain cycles.
+//
+// First, all the (unaugmented) non-test packages and their
+// dependencies are imported in the usual way; the loader reports an
+// error if it detects an import cycle.
+//
+// Then, each package P for which testing is desired is augmented by
+// the list P' of its in-package test files, by calling
+// (*types.Checker).Files. This arrangement ensures that P' may
+// reference definitions within P, but P may not reference definitions
+// within P'. Furthermore, P' may import any other package, including
+// ones that depend upon P, without an import cycle error.
+//
+// Consider two packages A and B, both of which have lists of
+// in-package test files we'll call A' and B', and which have the
+// following import graph edges:
+// B imports A
+// B' imports A
+// A' imports B
+// This last edge would be expected to create an error were it not
+// for the special type-checking discipline above.
+// Cycles of size greater than two are possible. For example:
+// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil"
+// io/ioutil/tempfile_test.go (package ioutil) imports "regexp"
+// regexp/exec_test.go (package regexp) imports "compress/bzip2"
+//
+//
+// Concurrency
+// -----------
+//
+// Let us define the import dependency graph as follows. Each node is a
+// list of files passed to (Checker).Files at once. Many of these lists
+// are the production code of an importable Go package, so those nodes
+// are labelled by the package's path. The remaining nodes are
+// ad hoc packages and lists of in-package *_test.go files that augment
+// an importable package; those nodes have no label.
+//
+// The edges of the graph represent import statements appearing within a
+// file. An edge connects a node (a list of files) to the node it
+// imports, which is importable and thus always labelled.
+//
+// Loading is controlled by this dependency graph.
+//
+// To reduce I/O latency, we start loading a package's dependencies
+// asynchronously as soon as we've parsed its files and enumerated its
+// imports (scanImports). This performs a preorder traversal of the
+// import dependency graph.
+//
+// To exploit hardware parallelism, we type-check unrelated packages in
+// parallel, where "unrelated" means not ordered by the partial order of
+// the import dependency graph.
+//
+// We use a concurrency-safe non-blocking cache (importer.imported) to
+// record the results of type-checking, whether success or failure. An
+// entry is created in this cache by startLoad the first time the
+// package is imported. The first goroutine to request an entry becomes
+// responsible for completing the task and broadcasting completion to
+// subsequent requestors, which block until then.
+//
+// Type checking occurs in (parallel) postorder: we cannot type-check a
+// set of files until we have loaded and type-checked all of their
+// immediate dependencies (and thus all of their transitive
+// dependencies). If the input were guaranteed free of import cycles,
+// this would be trivial: we could simply wait for completion of the
+// dependencies and then invoke the typechecker.
+//
+// But as we saw in the 'go test' section above, some cycles in the
+// import graph over packages are actually legal, so long as the
+// cycle-forming edge originates in the in-package test files that
+// augment the package. This explains why the nodes of the import
+// dependency graph are not packages, but lists of files: the unlabelled
+// nodes avoid the cycles. Consider packages A and B where B imports A
+// and A's in-package tests AT import B. The naively constructed import
+// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but
+// the graph over lists of files is AT --> B --> A, where AT is an
+// unlabelled node.
+//
+// Awaiting completion of the dependencies in a cyclic graph would
+// deadlock, so we must materialize the import dependency graph (as
+// importer.graph) and check whether each import edge forms a cycle. If
+// x imports y, and the graph already contains a path from y to x, then
+// there is an import cycle, in which case the processing of x must not
+// wait for the completion of processing of y.
+//
+// When the type-checker makes a callback (doImport) to the loader for a
+// given import edge, there are two possible cases. In the normal case,
+// the dependency has already been completely type-checked; doImport
+// does a cache lookup and returns it. In the cyclic case, the entry in
+// the cache is still necessarily incomplete, indicating a cycle. We
+// perform the cycle check again to obtain the error message, and return
+// the error.
+//
+// The result of using concurrency is about a 2.5x speedup for stdlib_test.
+
+// TODO(adonovan): overhaul the package documentation.
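
Beyond the basic Create/Import/Load flow shown above, the Config type defined in loader.go (later in this patch) exposes hooks that are easy to miss. A minimal sketch, assuming those fields; "bytes" is just an arbitrary example package:

package main

import (
	"fmt"
	"go/ast"
	"log"

	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config

	// Fully type-check only the initial package; dependencies get
	// their package-level declarations checked but not their bodies.
	conf.TypeCheckFuncBodies = func(path string) bool {
		return path == "bytes"
	}

	// Observe each list of files as soon as it has been checked.
	// (Load may call this concurrently from several goroutines.)
	conf.AfterTypeCheck = func(info *loader.PackageInfo, files []*ast.File) {
		fmt.Printf("checked %s (%d files)\n", info.Pkg.Path(), len(files))
	}

	conf.Import("bytes")
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d packages loaded in total\n", len(prog.AllPackages))
}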
diff --git a/vendor/golang.org/x/tools/go/loader/example_test.go b/vendor/golang.org/x/tools/go/loader/example_test.go
new file mode 100644
index 0000000..88adde3
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/example_test.go
@@ -0,0 +1,179 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.8,!go1.9 // TODO(adonovan) determine which versions we need to test here
+// +build !windows
+
+package loader_test
+
+import (
+ "fmt"
+ "go/token"
+ "log"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strings"
+
+ "golang.org/x/tools/go/loader"
+)
+
+func printProgram(prog *loader.Program) {
+ // Created packages are the initial packages specified by a call
+ // to CreateFromFilenames or CreateFromFiles.
+ var names []string
+ for _, info := range prog.Created {
+ names = append(names, info.Pkg.Path())
+ }
+ fmt.Printf("created: %s\n", names)
+
+ // Imported packages are the initial packages specified by a
+ // call to Import or ImportWithTests.
+ names = nil
+ for _, info := range prog.Imported {
+ if strings.Contains(info.Pkg.Path(), "internal") {
+ continue // skip, to reduce fragility
+ }
+ names = append(names, info.Pkg.Path())
+ }
+ sort.Strings(names)
+ fmt.Printf("imported: %s\n", names)
+
+ // InitialPackages contains the union of created and imported.
+ names = nil
+ for _, info := range prog.InitialPackages() {
+ names = append(names, info.Pkg.Path())
+ }
+ sort.Strings(names)
+ fmt.Printf("initial: %s\n", names)
+
+ // AllPackages contains all initial packages and their dependencies.
+ names = nil
+ for pkg := range prog.AllPackages {
+ names = append(names, pkg.Path())
+ }
+ sort.Strings(names)
+ fmt.Printf("all: %s\n", names)
+}
+
+func printFilenames(fset *token.FileSet, info *loader.PackageInfo) {
+ var names []string
+ for _, f := range info.Files {
+ names = append(names, filepath.Base(fset.File(f.Pos()).Name()))
+ }
+ fmt.Printf("%s.Files: %s\n", info.Pkg.Path(), names)
+}
+
+// This example loads a set of packages and all of their dependencies
+// from a typical command-line. FromArgs parses a command line and
+// makes calls to the other methods of Config shown in the examples that
+// follow.
+func ExampleConfig_FromArgs() {
+ args := []string{"mytool", "unicode/utf8", "errors", "runtime", "--", "foo", "bar"}
+ const wantTests = false
+
+ var conf loader.Config
+ rest, err := conf.FromArgs(args[1:], wantTests)
+ prog, err := conf.Load()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ fmt.Printf("rest: %s\n", rest)
+ printProgram(prog)
+ // Output:
+ // rest: [foo bar]
+ // created: []
+ // imported: [errors runtime unicode/utf8]
+ // initial: [errors runtime unicode/utf8]
+ // all: [errors runtime runtime/internal/atomic runtime/internal/sys unicode/utf8 unsafe]
+}
+
+// This example creates and type-checks a single package (without tests)
+// from a list of filenames, and loads all of its dependencies.
+// (The input files are actually only a small part of the math/cmplx package.)
+func ExampleConfig_CreateFromFilenames() {
+ var conf loader.Config
+ conf.CreateFromFilenames("math/cmplx",
+ filepath.Join(runtime.GOROOT(), "src/math/cmplx/abs.go"),
+ filepath.Join(runtime.GOROOT(), "src/math/cmplx/sin.go"))
+ prog, err := conf.Load()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ printProgram(prog)
+ // Output:
+ // created: [math/cmplx]
+ // imported: []
+ // initial: [math/cmplx]
+ // all: [math math/cmplx unsafe]
+}
+
+// In the examples below, for stability, the chosen packages are
+// relatively small, platform-independent, and low-level (and thus
+// infrequently changing).
+// The strconv package has internal and external tests.
+
+const hello = `package main
+
+import "fmt"
+
+func main() {
+ fmt.Println("Hello, world.")
+}
+`
+
+// This example creates and type-checks a package from a list of
+// already-parsed files, and loads all its dependencies.
+func ExampleConfig_CreateFromFiles() {
+ var conf loader.Config
+ f, err := conf.ParseFile("hello.go", hello)
+ if err != nil {
+ log.Fatal(err)
+ }
+ conf.CreateFromFiles("hello", f)
+ prog, err := conf.Load()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ printProgram(prog)
+ printFilenames(prog.Fset, prog.Package("strconv"))
+ // Output:
+ // created: [hello]
+ // imported: []
+ // initial: [hello]
+ // all: [errors fmt hello internal/race io math os reflect runtime runtime/internal/atomic runtime/internal/sys strconv sync sync/atomic syscall time unicode/utf8 unsafe]
+ // strconv.Files: [atob.go atof.go atoi.go decimal.go doc.go extfloat.go ftoa.go isprint.go itoa.go quote.go]
+}
+
+// This example imports three packages, including the tests for one of
+// them, and loads all their dependencies.
+func ExampleConfig_Import() {
+ // ImportWithTests("strconv") causes strconv to include
+ // internal_test.go, and creates an external test package,
+ // strconv_test.
+ // (Compare with the example of CreateFromFiles.)
+
+ var conf loader.Config
+ conf.Import("unicode/utf8")
+ conf.Import("errors")
+ conf.ImportWithTests("strconv")
+ prog, err := conf.Load()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ printProgram(prog)
+ printFilenames(prog.Fset, prog.Package("strconv"))
+ printFilenames(prog.Fset, prog.Package("strconv_test"))
+ // Output:
+ // created: [strconv_test]
+ // imported: [errors strconv unicode/utf8]
+ // initial: [errors strconv strconv_test unicode/utf8]
+ // all: [bufio bytes errors flag fmt internal/race io log math math/rand os reflect runtime runtime/debug runtime/internal/atomic runtime/internal/sys runtime/trace sort strconv strconv_test strings sync sync/atomic syscall testing time unicode unicode/utf8 unsafe]
+ // strconv.Files: [atob.go atof.go atoi.go decimal.go doc.go extfloat.go ftoa.go isprint.go itoa.go quote.go internal_test.go]
+ // strconv_test.Files: [atob_test.go atof_test.go atoi_test.go decimal_test.go example_test.go fp_test.go ftoa_test.go itoa_test.go quote_test.go strconv_test.go]
+}
diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go
new file mode 100644
index 0000000..de756f7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/loader.go
@@ -0,0 +1,1077 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package loader
+
+// See doc.go for package documentation and implementation notes.
+
+import (
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+var ignoreVendor build.ImportMode
+
+const trace = false // show timing info for type-checking
+
+// Config specifies the configuration for loading a whole program from
+// Go source code.
+// The zero value for Config is a ready-to-use default configuration.
+type Config struct {
+ // Fset is the file set for the parser to use when loading the
+ // program. If nil, it may be lazily initialized by any
+ // method of Config.
+ Fset *token.FileSet
+
+ // ParserMode specifies the mode to be used by the parser when
+ // loading source packages.
+ ParserMode parser.Mode
+
+ // TypeChecker contains options relating to the type checker.
+ //
+ // The supplied IgnoreFuncBodies is not used; the effective
+ // value comes from the TypeCheckFuncBodies func below.
+ // The supplied Import function is not used either.
+ TypeChecker types.Config
+
+ // TypeCheckFuncBodies is a predicate over package paths.
+ // A package for which the predicate is false will
+ // have its package-level declarations type checked, but not
+ // its function bodies; this can be used to quickly load
+ // dependencies from source. If nil, all func bodies are type
+ // checked.
+ TypeCheckFuncBodies func(path string) bool
+
+ // If Build is non-nil, it is used to locate source packages.
+ // Otherwise &build.Default is used.
+ //
+ // By default, cgo is invoked to preprocess Go files that
+ // import the fake package "C". This behaviour can be
+ // disabled by setting CGO_ENABLED=0 in the environment prior
+ // to startup, or by setting Build.CgoEnabled=false.
+ Build *build.Context
+
+ // The current directory, used for resolving relative package
+ // references such as "./go/loader". If empty, os.Getwd will be
+ // used instead.
+ Cwd string
+
+ // If DisplayPath is non-nil, it is used to transform each
+ // file name obtained from Build.Import(). This can be used
+ // to prevent a virtualized build.Context's file names from
+ // leaking into the user interface.
+ DisplayPath func(path string) string
+
+ // If AllowErrors is true, Load will return a Program even
+ // if some of its packages contained I/O, parser or type
+ // errors; such errors are accessible via PackageInfo.Errors. If
+ // false, Load will fail if any package had an error.
+ AllowErrors bool
+
+ // CreatePkgs specifies a list of non-importable initial
+ // packages to create. The resulting packages will appear in
+ // the corresponding elements of the Program.Created slice.
+ CreatePkgs []PkgSpec
+
+ // ImportPkgs specifies a set of initial packages to load.
+ // The map keys are package paths.
+ //
+ // The map value indicates whether to load tests. If true, Load
+ // will add and type-check two lists of files to the package:
+ // non-test files followed by in-package *_test.go files. In
+ // addition, it will append the external test package (if any)
+ // to Program.Created.
+ ImportPkgs map[string]bool
+
+ // FindPackage is called during Load to create the build.Package
+ // for a given import path from a given directory.
+ // If FindPackage is nil, (*build.Context).Import is used.
+ // A client may use this hook to adapt to a proprietary build
+ // system that does not follow the "go build" layout
+ // conventions, for example.
+ //
+ // It must be safe to call concurrently from multiple goroutines.
+ FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error)
+
+ // AfterTypeCheck is called immediately after a list of files
+ // has been type-checked and appended to info.Files.
+ //
+ // This optional hook function is the earliest opportunity for
+ // the client to observe the output of the type checker,
+ // which may be useful to reduce analysis latency when loading
+ // a large program.
+ //
+ // The function is permitted to modify info.Info, for instance
+ // to clear data structures that are no longer needed, which can
+ // dramatically reduce peak memory consumption.
+ //
+ // The function may be called twice for the same PackageInfo:
+ // once for the files of the package and again for the
+ // in-package test files.
+ //
+ // It must be safe to call concurrently from multiple goroutines.
+ AfterTypeCheck func(info *PackageInfo, files []*ast.File)
+}
+
+// A PkgSpec specifies a non-importable package to be created by Load.
+// Files are processed first, but typically only one of Files and
+// Filenames is provided. The path needn't be globally unique.
+//
+// For vendoring purposes, the package's directory is the one that
+// contains the first file.
+type PkgSpec struct {
+ Path string // package path ("" => use package declaration)
+ Files []*ast.File // ASTs of already-parsed files
+ Filenames []string // names of files to be parsed
+}
+
+// A Program is a Go program loaded from source as specified by a Config.
+type Program struct {
+ Fset *token.FileSet // the file set for this program
+
+ // Created[i] contains the initial package whose ASTs or
+ // filenames were supplied by Config.CreatePkgs[i], followed by
+ // the external test package, if any, of each package in
+ // Config.ImportPkgs ordered by ImportPath.
+ //
+ // NOTE: these files must not import "C". Cgo preprocessing is
+ // only performed on imported packages, not ad hoc packages.
+ //
+ // TODO(adonovan): we need to copy and adapt the logic of
+ // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make
+ // Config.Import and Config.Create methods return the same kind
+ // of entity, essentially a build.Package.
+ // Perhaps we can even reuse that type directly.
+ Created []*PackageInfo
+
+ // Imported contains the initially imported packages,
+ // as specified by Config.ImportPkgs.
+ Imported map[string]*PackageInfo
+
+ // AllPackages contains the PackageInfo of every package
+ // encountered by Load: all initial packages and all
+ // dependencies, including incomplete ones.
+ AllPackages map[*types.Package]*PackageInfo
+
+ // importMap is the canonical mapping of package paths to
+ // packages. It contains all Imported initial packages, but not
+ // Created ones, and all imported dependencies.
+ importMap map[string]*types.Package
+}
+
+// PackageInfo holds the ASTs and facts derived by the type-checker
+// for a single package.
+//
+// Not mutated once exposed via the API.
+//
+type PackageInfo struct {
+ Pkg *types.Package
+ Importable bool // true if 'import "Pkg.Path()"' would resolve to this
+ TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors
+ Files []*ast.File // syntax trees for the package's files
+ Errors []error // non-nil if the package had errors
+ types.Info // type-checker deductions.
+ dir string // package directory
+
+ checker *types.Checker // transient type-checker state
+ errorFunc func(error)
+}
+
+func (info *PackageInfo) String() string { return info.Pkg.Path() }
+
+func (info *PackageInfo) appendError(err error) {
+ if info.errorFunc != nil {
+ info.errorFunc(err)
+ } else {
+ fmt.Fprintln(os.Stderr, err)
+ }
+ info.Errors = append(info.Errors, err)
+}
+
+func (conf *Config) fset() *token.FileSet {
+ if conf.Fset == nil {
+ conf.Fset = token.NewFileSet()
+ }
+ return conf.Fset
+}
+
+// ParseFile is a convenience function (intended for testing) that invokes
+// the parser using the Config's FileSet, which is initialized if nil.
+//
+// src specifies the parser input as a string, []byte, or io.Reader, and
+// filename is its apparent name. If src is nil, the contents of
+// filename are read from the file system.
+//
+func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) {
+ // TODO(adonovan): use conf.build() etc like parseFiles does.
+ return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode)
+}
+
+// FromArgsUsage is a partial usage message that applications calling
+// FromArgs may wish to include in their -help output.
+const FromArgsUsage = `
+<args> is a list of arguments denoting a set of initial packages.
+It may take one of two forms:
+
+1. A list of *.go source files.
+
+ All of the specified files are loaded, parsed and type-checked
+ as a single package. All the files must belong to the same directory.
+
+2. A list of import paths, each denoting a package.
+
+ The package's directory is found relative to the $GOROOT and
+ $GOPATH using similar logic to 'go build', and the *.go files in
+ that directory are loaded, parsed and type-checked as a single
+ package.
+
+ In addition, all *_test.go files in the directory are then loaded
+ and parsed. Those files whose package declaration equals that of
+ the non-*_test.go files are included in the primary package. Test
+ files whose package declaration ends with "_test" are type-checked
+ as another package, the 'external' test package, so that a single
+ import path may denote two packages. (Whether this behaviour is
+ enabled is tool-specific, and may depend on additional flags.)
+
+A '--' argument terminates the list of packages.
+`
+
+// FromArgs interprets args as a set of initial packages to load from
+// source and updates the configuration. It returns the list of
+// unconsumed arguments.
+//
+// It is intended for use in command-line interfaces that require a
+// set of initial packages to be specified; see FromArgsUsage message
+// for details.
+//
+// Only superficial errors are reported at this stage; errors dependent
+// on I/O are detected during Load.
+//
+func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
+ var rest []string
+ for i, arg := range args {
+ if arg == "--" {
+ rest = args[i+1:]
+ args = args[:i]
+ break // consume "--" and return the remaining args
+ }
+ }
+
+ if len(args) > 0 && strings.HasSuffix(args[0], ".go") {
+ // Assume args is a list of *.go files
+ // denoting a single ad hoc package.
+ for _, arg := range args {
+ if !strings.HasSuffix(arg, ".go") {
+ return nil, fmt.Errorf("named files must be .go files: %s", arg)
+ }
+ }
+ conf.CreateFromFilenames("", args...)
+ } else {
+ // Assume args are directories each denoting a
+ // package and (perhaps) an external test, iff xtest.
+ for _, arg := range args {
+ if xtest {
+ conf.ImportWithTests(arg)
+ } else {
+ conf.Import(arg)
+ }
+ }
+ }
+
+ return rest, nil
+}
+
+// CreateFromFilenames is a convenience function that adds
+// a conf.CreatePkgs entry to create a package of the specified *.go
+// files.
+//
+func (conf *Config) CreateFromFilenames(path string, filenames ...string) {
+ conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames})
+}
+
+// CreateFromFiles is a convenience function that adds a conf.CreatePkgs
+// entry to create a package of the specified path and parsed files.
+//
+func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
+ conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files})
+}
+
+// ImportWithTests is a convenience function that adds path to
+// ImportPkgs, the set of initial source packages located relative to
+// $GOPATH. The package will be augmented by any *_test.go files in
+// its directory that contain a "package x" (not "package x_test")
+// declaration.
+//
+// In addition, if any *_test.go files contain a "package x_test"
+// declaration, an additional package comprising just those files will
+// be added to CreatePkgs.
+//
+func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) }
+
+// Import is a convenience function that adds path to ImportPkgs, the
+// set of initial packages that will be imported from source.
+//
+func (conf *Config) Import(path string) { conf.addImport(path, false) }
+
+func (conf *Config) addImport(path string, tests bool) {
+ if path == "C" {
+ return // ignore; not a real package
+ }
+ if conf.ImportPkgs == nil {
+ conf.ImportPkgs = make(map[string]bool)
+ }
+ conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests
+}
+
+// PathEnclosingInterval returns the PackageInfo and ast.Node that
+// contain source interval [start, end), and all the node's ancestors
+// up to the AST root. It searches all ast.Files of all packages in prog.
+// exact is defined as for astutil.PathEnclosingInterval.
+//
+// The zero value is returned if not found.
+//
+func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) {
+ for _, info := range prog.AllPackages {
+ for _, f := range info.Files {
+ if f.Pos() == token.NoPos {
+ // This can happen if the parser saw
+ // too many errors and bailed out.
+ // (Use parser.AllErrors to prevent that.)
+ continue
+ }
+ if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) {
+ continue
+ }
+ if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil {
+ return info, path, exact
+ }
+ }
+ }
+ return nil, nil, false
+}
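+
+// A usage sketch (added for illustration; start and end are assumed to be
+// token.Pos values obtained via prog.Fset): the innermost enclosing node is
+// the first element of the returned path.
+//
+//	pkg, path, exact := prog.PathEnclosingInterval(start, end)
+//	if pkg == nil {
+//		// The interval is not in any loaded file.
+//	} else if id, ok := path[0].(*ast.Ident); ok && exact {
+//		obj := pkg.Info.Uses[id] // nil for defining occurrences
+//		_ = obj
+//	}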
+
+// InitialPackages returns a new slice containing the set of initial
+// packages (Created + Imported) in unspecified order.
+//
+func (prog *Program) InitialPackages() []*PackageInfo {
+ infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported))
+ infos = append(infos, prog.Created...)
+ for _, info := range prog.Imported {
+ infos = append(infos, info)
+ }
+ return infos
+}
+
+// Package returns the ASTs and results of type checking for the
+// specified package.
+func (prog *Program) Package(path string) *PackageInfo {
+ if info, ok := prog.AllPackages[prog.importMap[path]]; ok {
+ return info
+ }
+ for _, info := range prog.Created {
+ if path == info.Pkg.Path() {
+ return info
+ }
+ }
+ return nil
+}
+
+// ---------- Implementation ----------
+
+// importer holds the working state of the algorithm.
+type importer struct {
+ conf *Config // the client configuration
+ start time.Time // for logging
+
+ progMu sync.Mutex // guards prog
+ prog *Program // the resulting program
+
+ // findpkg is a memoization of FindPackage.
+ findpkgMu sync.Mutex // guards findpkg
+ findpkg map[findpkgKey]*findpkgValue
+
+ importedMu sync.Mutex // guards imported
+ imported map[string]*importInfo // all imported packages (incl. failures) by import path
+
+ // import dependency graph: graph[x][y] => x imports y
+ //
+ // Since non-importable packages cannot be cyclic, we ignore
+ // their imports; thus we need only the subgraph over importable
+ // packages. Nodes are identified by their import paths.
+ graphMu sync.Mutex
+ graph map[string]map[string]bool
+}
+
+type findpkgKey struct {
+ importPath string
+ fromDir string
+ mode build.ImportMode
+}
+
+type findpkgValue struct {
+ ready chan struct{} // closed to broadcast readiness
+ bp *build.Package
+ err error
+}
+
+// importInfo tracks the success or failure of a single import.
+//
+// Upon completion, the info field is non-nil; failure to create a
+// package is reported separately (see importError).
+// A successful package may still contain type errors.
+//
+type importInfo struct {
+ path string // import path
+ info *PackageInfo // results of typechecking (including errors)
+ complete chan struct{} // closed to broadcast that info is set.
+}
+
+// awaitCompletion blocks until ii is complete,
+// i.e. the info field is safe to inspect.
+func (ii *importInfo) awaitCompletion() {
+ <-ii.complete // wait for close
+}
+
+// Complete marks ii as complete.
+// Its info field will not be subsequently updated.
+func (ii *importInfo) Complete(info *PackageInfo) {
+ if info == nil {
+ panic("info == nil")
+ }
+ ii.info = info
+ close(ii.complete)
+}
+
+type importError struct {
+ path string // import path
+ err error // reason for failure to create a package
+}
+
+// Load creates the initial packages specified by conf.{Create,Import}Pkgs,
+// loading their dependency packages as needed.
+//
+// On success, Load returns a Program containing a PackageInfo for
+// each package. On failure, it returns an error.
+//
+// If AllowErrors is true, Load will return a Program even if some
+// packages contained I/O, parser or type errors, or if dependencies
+// were missing. (Such errors are accessible via PackageInfo.Errors.)
+// If false, Load will fail if any package had an error.
+//
+// It is an error if no packages were loaded.
+//
+func (conf *Config) Load() (*Program, error) {
+ // Create a simple default error handler for parse/type errors.
+ if conf.TypeChecker.Error == nil {
+ conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) }
+ }
+
+ // Set default working directory for relative package references.
+ if conf.Cwd == "" {
+ var err error
+ conf.Cwd, err = os.Getwd()
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // Install default FindPackage hook using go/build logic.
+ if conf.FindPackage == nil {
+ conf.FindPackage = (*build.Context).Import
+ }
+
+ prog := &Program{
+ Fset: conf.fset(),
+ Imported: make(map[string]*PackageInfo),
+ importMap: make(map[string]*types.Package),
+ AllPackages: make(map[*types.Package]*PackageInfo),
+ }
+
+ imp := importer{
+ conf: conf,
+ prog: prog,
+ findpkg: make(map[findpkgKey]*findpkgValue),
+ imported: make(map[string]*importInfo),
+ start: time.Now(),
+ graph: make(map[string]map[string]bool),
+ }
+
+ // -- loading proper (concurrent phase) --------------------------------
+
+ var errpkgs []string // packages that contained errors
+
+ // Load the initially imported packages and their dependencies,
+ // in parallel.
+ // No vendor check on packages imported from the command line.
+ infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor)
+ for _, ie := range importErrors {
+ conf.TypeChecker.Error(ie.err) // failed to create package
+ errpkgs = append(errpkgs, ie.path)
+ }
+ for _, info := range infos {
+ prog.Imported[info.Pkg.Path()] = info
+ }
+
+ // Augment the designated initial packages by their tests.
+ // Dependencies are loaded in parallel.
+ var xtestPkgs []*build.Package
+ for importPath, augment := range conf.ImportPkgs {
+ if !augment {
+ continue
+ }
+
+ // No vendor check on packages imported from command line.
+ bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor)
+ if err != nil {
+ // Package not found, or can't even parse package declaration.
+ // Already reported by previous loop; ignore it.
+ continue
+ }
+
+ // Needs external test package?
+ if len(bp.XTestGoFiles) > 0 {
+ xtestPkgs = append(xtestPkgs, bp)
+ }
+
+ // Consult the cache using the canonical package path.
+ path := bp.ImportPath
+ imp.importedMu.Lock() // (unnecessary, we're sequential here)
+ ii, ok := imp.imported[path]
+ // Paranoid checks added due to issue #11012.
+ if !ok {
+ // Unreachable.
+ // The previous loop called importAll and thus
+ // startLoad for each path in ImportPkgs, which
+ // populates imp.imported[path] with a non-zero value.
+ panic(fmt.Sprintf("imported[%q] not found", path))
+ }
+ if ii == nil {
+ // Unreachable.
+ // The ii values in this loop are the same as in
+ // the previous loop, which enforced the invariant
+ // that at least one of ii.err and ii.info is non-nil.
+ panic(fmt.Sprintf("imported[%q] == nil", path))
+ }
+ if ii.info == nil {
+ // Unreachable.
+ // awaitCompletion has the postcondition
+ // ii.info != nil.
+ panic(fmt.Sprintf("imported[%q].info = nil", path))
+ }
+ info := ii.info
+ imp.importedMu.Unlock()
+
+ // Parse the in-package test files.
+ files, errs := imp.conf.parsePackageFiles(bp, 't')
+ for _, err := range errs {
+ info.appendError(err)
+ }
+
+ // The test files augmenting package P cannot be imported,
+ // but may import packages that import P,
+ // so we must disable the cycle check.
+ imp.addFiles(info, files, false)
+ }
+
+ createPkg := func(path, dir string, files []*ast.File, errs []error) {
+ info := imp.newPackageInfo(path, dir)
+ for _, err := range errs {
+ info.appendError(err)
+ }
+
+ // Ad hoc packages are non-importable,
+ // so no cycle check is needed.
+ // addFiles loads dependencies in parallel.
+ imp.addFiles(info, files, false)
+ prog.Created = append(prog.Created, info)
+ }
+
+ // Create packages specified by conf.CreatePkgs.
+ for _, cp := range conf.CreatePkgs {
+ files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode)
+ files = append(files, cp.Files...)
+
+ path := cp.Path
+ if path == "" {
+ if len(files) > 0 {
+ path = files[0].Name.Name
+ } else {
+ path = "(unnamed)"
+ }
+ }
+
+ dir := conf.Cwd
+ if len(files) > 0 && files[0].Pos().IsValid() {
+ dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name())
+ }
+ createPkg(path, dir, files, errs)
+ }
+
+ // Create external test packages.
+ sort.Sort(byImportPath(xtestPkgs))
+ for _, bp := range xtestPkgs {
+ files, errs := imp.conf.parsePackageFiles(bp, 'x')
+ createPkg(bp.ImportPath+"_test", bp.Dir, files, errs)
+ }
+
+ // -- finishing up (sequential) ----------------------------------------
+
+ if len(prog.Imported)+len(prog.Created) == 0 {
+ return nil, errors.New("no initial packages were loaded")
+ }
+
+ // Create infos for indirectly imported packages.
+ // e.g. incomplete packages without syntax, loaded from export data.
+ for _, obj := range prog.importMap {
+ info := prog.AllPackages[obj]
+ if info == nil {
+ prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true}
+ } else {
+ // finished
+ info.checker = nil
+ info.errorFunc = nil
+ }
+ }
+
+ if !conf.AllowErrors {
+ // Report errors in indirectly imported packages.
+ for _, info := range prog.AllPackages {
+ if len(info.Errors) > 0 {
+ errpkgs = append(errpkgs, info.Pkg.Path())
+ }
+ }
+ if errpkgs != nil {
+ var more string
+ if len(errpkgs) > 3 {
+ more = fmt.Sprintf(" and %d more", len(errpkgs)-3)
+ errpkgs = errpkgs[:3]
+ }
+ return nil, fmt.Errorf("couldn't load packages due to errors: %s%s",
+ strings.Join(errpkgs, ", "), more)
+ }
+ }
+
+ markErrorFreePackages(prog.AllPackages)
+
+ return prog, nil
+}
+
+type byImportPath []*build.Package
+
+func (b byImportPath) Len() int { return len(b) }
+func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath }
+func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
+
+// markErrorFreePackages sets the TransitivelyErrorFree flag on all
+// applicable packages.
+func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) {
+ // Build the transpose of the import graph.
+ importedBy := make(map[*types.Package]map[*types.Package]bool)
+ for P := range allPackages {
+ for _, Q := range P.Imports() {
+ clients, ok := importedBy[Q]
+ if !ok {
+ clients = make(map[*types.Package]bool)
+ importedBy[Q] = clients
+ }
+ clients[P] = true
+ }
+ }
+
+ // Find all packages reachable from some error package.
+ reachable := make(map[*types.Package]bool)
+ var visit func(*types.Package)
+ visit = func(p *types.Package) {
+ if !reachable[p] {
+ reachable[p] = true
+ for q := range importedBy[p] {
+ visit(q)
+ }
+ }
+ }
+ for _, info := range allPackages {
+ if len(info.Errors) > 0 {
+ visit(info.Pkg)
+ }
+ }
+
+ // Mark the others as "transitively error-free".
+ for _, info := range allPackages {
+ if !reachable[info.Pkg] {
+ info.TransitivelyErrorFree = true
+ }
+ }
+}
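+
+// Worked example (illustrative, not from the original source): if a imports
+// b, b imports c, and only c has errors, the transposed graph has edges
+// c->b and b->a, so the walk from c marks a, b and c as reachable and none
+// of them becomes TransitivelyErrorFree, while an unrelated error-free
+// package d does.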
+
+// build returns the effective build context.
+func (conf *Config) build() *build.Context {
+ if conf.Build != nil {
+ return conf.Build
+ }
+ return &build.Default
+}
+
+// parsePackageFiles enumerates the files belonging to package path,
+// then loads, parses and returns them, plus a list of I/O or parse
+// errors that were encountered.
+//
+// 'which' indicates which files to include:
+// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
+// 't': include in-package *_test.go source files (TestGoFiles)
+// 'x': include external *_test.go source files. (XTestGoFiles)
+//
+func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) {
+ if bp.ImportPath == "unsafe" {
+ return nil, nil
+ }
+ var filenames []string
+ switch which {
+ case 'g':
+ filenames = bp.GoFiles
+ case 't':
+ filenames = bp.TestGoFiles
+ case 'x':
+ filenames = bp.XTestGoFiles
+ default:
+ panic(which)
+ }
+
+ files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode)
+
+ // Preprocess CgoFiles and parse the outputs (sequentially).
+ if which == 'g' && bp.CgoFiles != nil {
+ cgofiles, err := processCgoFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode)
+ if err != nil {
+ errs = append(errs, err)
+ } else {
+ files = append(files, cgofiles...)
+ }
+ }
+
+ return files, errs
+}
+
+// doImport imports the package denoted by path.
+// It implements the types.Importer signature.
+//
+// It returns an error if a package could not be created
+// (e.g. go/build or parse error), but type errors are reported via
+// the types.Config.Error callback (the first of which is also saved
+// in the package's PackageInfo).
+//
+// Idempotent.
+//
+func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
+ if to == "C" {
+ // This should be unreachable, but ad hoc packages are
+ // not currently subject to cgo preprocessing.
+ // See https://github.com/golang/go/issues/11627.
+ return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`,
+ from.Pkg.Path())
+ }
+
+ bp, err := imp.findPackage(to, from.dir, 0)
+ if err != nil {
+ return nil, err
+ }
+
+ // The standard unsafe package is handled specially,
+ // and has no PackageInfo.
+ if bp.ImportPath == "unsafe" {
+ return types.Unsafe, nil
+ }
+
+ // Look for the package in the cache using its canonical path.
+ path := bp.ImportPath
+ imp.importedMu.Lock()
+ ii := imp.imported[path]
+ imp.importedMu.Unlock()
+ if ii == nil {
+ panic("internal error: unexpected import: " + path)
+ }
+ if ii.info != nil {
+ return ii.info.Pkg, nil
+ }
+
+ // Import of incomplete package: this indicates a cycle.
+ fromPath := from.Pkg.Path()
+ if cycle := imp.findPath(path, fromPath); cycle != nil {
+ cycle = append([]string{fromPath}, cycle...)
+ return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
+ }
+
+ panic("internal error: import of incomplete (yet acyclic) package: " + fromPath)
+}
+
+// findPackage locates the package denoted by the importPath in the
+// specified directory.
+func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) {
+ // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7)
+ // to avoid holding the lock around FindPackage.
+ key := findpkgKey{importPath, fromDir, mode}
+ imp.findpkgMu.Lock()
+ v, ok := imp.findpkg[key]
+ if ok {
+ // cache hit
+ imp.findpkgMu.Unlock()
+
+ <-v.ready // wait for entry to become ready
+ } else {
+ // Cache miss: this goroutine becomes responsible for
+ // populating the map entry and broadcasting its readiness.
+ v = &findpkgValue{ready: make(chan struct{})}
+ imp.findpkg[key] = v
+ imp.findpkgMu.Unlock()
+
+ ioLimit <- true
+ v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode)
+ <-ioLimit
+
+ if _, ok := v.err.(*build.NoGoError); ok {
+ v.err = nil // empty directory is not an error
+ }
+
+ close(v.ready) // broadcast ready condition
+ }
+ return v.bp, v.err
+}
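+
+// The cache above is an instance of the duplicate-suppressing pattern: the
+// first goroutine to request a key creates the entry, fills it in outside
+// the lock, and closes ready to wake any waiters. A minimal sketch with
+// hypothetical names (entry, cache, slowLookup) for illustration:
+//
+//	mu.Lock()
+//	e, ok := cache[key]
+//	if ok {
+//		mu.Unlock()
+//		<-e.ready // wait until the owner has filled in e
+//	} else {
+//		e = &entry{ready: make(chan struct{})}
+//		cache[key] = e
+//		mu.Unlock()
+//		e.val, e.err = slowLookup(key) // I/O done without holding mu
+//		close(e.ready)                 // broadcast readiness
+//	}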
+
+// importAll loads, parses, and type-checks the specified packages in
+// parallel and returns their completed importInfos in unspecified order.
+//
+// fromPath is the package path of the importing package, if it is
+// importable, "" otherwise. It is used for cycle detection.
+//
+// fromDir is the directory containing the import declaration that
+// caused these imports.
+//
+func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
+ // TODO(adonovan): opt: do the loop in parallel once
+ // findPackage is non-blocking.
+ var pending []*importInfo
+ for importPath := range imports {
+ bp, err := imp.findPackage(importPath, fromDir, mode)
+ if err != nil {
+ errors = append(errors, importError{
+ path: importPath,
+ err: err,
+ })
+ continue
+ }
+ pending = append(pending, imp.startLoad(bp))
+ }
+
+ if fromPath != "" {
+ // We're loading a set of imports.
+ //
+ // We must record graph edges from the importing package
+ // to its dependencies, and check for cycles.
+ imp.graphMu.Lock()
+ deps, ok := imp.graph[fromPath]
+ if !ok {
+ deps = make(map[string]bool)
+ imp.graph[fromPath] = deps
+ }
+ for _, ii := range pending {
+ deps[ii.path] = true
+ }
+ imp.graphMu.Unlock()
+ }
+
+ for _, ii := range pending {
+ if fromPath != "" {
+ if cycle := imp.findPath(ii.path, fromPath); cycle != nil {
+ // Cycle-forming import: we must not await its
+ // completion since it would deadlock.
+ //
+ // We don't record the error in ii since
+ // the error is really associated with the
+ // cycle-forming edge, not the package itself.
+ // (Also it would complicate the
+ // invariants of importPath completion.)
+ if trace {
+ fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle)
+ }
+ continue
+ }
+ }
+ ii.awaitCompletion()
+ infos = append(infos, ii.info)
+ }
+
+ return infos, errors
+}
+
+// findPath returns an arbitrary path from 'from' to 'to' in the import
+// graph, or nil if there was none.
+func (imp *importer) findPath(from, to string) []string {
+ imp.graphMu.Lock()
+ defer imp.graphMu.Unlock()
+
+ seen := make(map[string]bool)
+ var search func(stack []string, importPath string) []string
+ search = func(stack []string, importPath string) []string {
+ if !seen[importPath] {
+ seen[importPath] = true
+ stack = append(stack, importPath)
+ if importPath == to {
+ return stack
+ }
+ for x := range imp.graph[importPath] {
+ if p := search(stack, x); p != nil {
+ return p
+ }
+ }
+ }
+ return nil
+ }
+ return search(make([]string, 0, 20), from)
+}
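+
+// For example (illustrative only): with graph = {a:{b}, b:{c}, c:{a}},
+// findPath("b", "a") returns [b c a]; doImport then prepends the importing
+// package before formatting the "import cycle" error.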
+
+// startLoad initiates the loading, parsing and type-checking of the
+// specified package and its dependencies, if it has not already begun.
+//
+// It returns an importInfo, not necessarily in a completed state. The
+// caller must call awaitCompletion() before accessing its info field.
+//
+// startLoad is concurrency-safe and idempotent.
+//
+func (imp *importer) startLoad(bp *build.Package) *importInfo {
+ path := bp.ImportPath
+ imp.importedMu.Lock()
+ ii, ok := imp.imported[path]
+ if !ok {
+ ii = &importInfo{path: path, complete: make(chan struct{})}
+ imp.imported[path] = ii
+ go func() {
+ info := imp.load(bp)
+ ii.Complete(info)
+ }()
+ }
+ imp.importedMu.Unlock()
+
+ return ii
+}
+
+// load implements package loading by parsing Go source files
+// located by go/build.
+func (imp *importer) load(bp *build.Package) *PackageInfo {
+ info := imp.newPackageInfo(bp.ImportPath, bp.Dir)
+ info.Importable = true
+ files, errs := imp.conf.parsePackageFiles(bp, 'g')
+ for _, err := range errs {
+ info.appendError(err)
+ }
+
+ imp.addFiles(info, files, true)
+
+ imp.progMu.Lock()
+ imp.prog.importMap[bp.ImportPath] = info.Pkg
+ imp.progMu.Unlock()
+
+ return info
+}
+
+// addFiles adds and type-checks the specified files to info, loading
+// their dependencies if needed. The order of files determines the
+// package initialization order. It may be called multiple times on the
+// same package. Errors are appended to the info.Errors field.
+//
+// cycleCheck determines whether the imports within files create
+// dependency edges that should be checked for potential cycles.
+//
+func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) {
+ // Ensure the dependencies are loaded, in parallel.
+ var fromPath string
+ if cycleCheck {
+ fromPath = info.Pkg.Path()
+ }
+ // TODO(adonovan): opt: make the caller do scanImports.
+ // Callers with a build.Package can skip it.
+ imp.importAll(fromPath, info.dir, scanImports(files), 0)
+
+ if trace {
+ fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n",
+ time.Since(imp.start), info.Pkg.Path(), len(files))
+ }
+
+ // Don't call checker.Files on Unsafe, even with zero files,
+ // because it would mutate the package, which is a global.
+ if info.Pkg == types.Unsafe {
+ if len(files) > 0 {
+ panic(`"unsafe" package contains unexpected files`)
+ }
+ } else {
+ // Ignore the returned (first) error since we
+ // already collect them all in the PackageInfo.
+ info.checker.Files(files)
+ info.Files = append(info.Files, files...)
+ }
+
+ if imp.conf.AfterTypeCheck != nil {
+ imp.conf.AfterTypeCheck(info, files)
+ }
+
+ if trace {
+ fmt.Fprintf(os.Stderr, "%s: stop %q\n",
+ time.Since(imp.start), info.Pkg.Path())
+ }
+}
+
+func (imp *importer) newPackageInfo(path, dir string) *PackageInfo {
+ var pkg *types.Package
+ if path == "unsafe" {
+ pkg = types.Unsafe
+ } else {
+ pkg = types.NewPackage(path, "")
+ }
+ info := &PackageInfo{
+ Pkg: pkg,
+ Info: types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ },
+ errorFunc: imp.conf.TypeChecker.Error,
+ dir: dir,
+ }
+
+ // Copy the types.Config so we can vary it across PackageInfos.
+ tc := imp.conf.TypeChecker
+ tc.IgnoreFuncBodies = false
+ if f := imp.conf.TypeCheckFuncBodies; f != nil {
+ tc.IgnoreFuncBodies = !f(path)
+ }
+ tc.Importer = closure{imp, info}
+ tc.Error = info.appendError // appendError wraps the user's Error function
+
+ info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info)
+ imp.progMu.Lock()
+ imp.prog.AllPackages[pkg] = info
+ imp.progMu.Unlock()
+ return info
+}
+
+type closure struct {
+ imp *importer
+ info *PackageInfo
+}
+
+func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) }
diff --git a/vendor/golang.org/x/tools/go/loader/loader_test.go b/vendor/golang.org/x/tools/go/loader/loader_test.go
new file mode 100644
index 0000000..02630eb
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/loader_test.go
@@ -0,0 +1,816 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// No testdata on Android.
+
+// +build !android
+
+package loader_test
+
+import (
+ "fmt"
+ "go/build"
+ "go/constant"
+ "go/types"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strings"
+ "sync"
+ "testing"
+
+ "golang.org/x/tools/go/buildutil"
+ "golang.org/x/tools/go/loader"
+)
+
+// TestFromArgs checks that conf.FromArgs populates conf correctly.
+// It does no I/O.
+func TestFromArgs(t *testing.T) {
+ type result struct {
+ Err string
+ Rest []string
+ ImportPkgs map[string]bool
+ CreatePkgs []loader.PkgSpec
+ }
+ for _, test := range []struct {
+ args []string
+ tests bool
+ want result
+ }{
+ // Mix of existing and non-existent packages.
+ {
+ args: []string{"nosuchpkg", "errors"},
+ want: result{
+ ImportPkgs: map[string]bool{"errors": false, "nosuchpkg": false},
+ },
+ },
+ // Same, with -test flag.
+ {
+ args: []string{"nosuchpkg", "errors"},
+ tests: true,
+ want: result{
+ ImportPkgs: map[string]bool{"errors": true, "nosuchpkg": true},
+ },
+ },
+ // Surplus arguments.
+ {
+ args: []string{"fmt", "errors", "--", "surplus"},
+ want: result{
+ Rest: []string{"surplus"},
+ ImportPkgs: map[string]bool{"errors": false, "fmt": false},
+ },
+ },
+ // Ad hoc package specified as *.go files.
+ {
+ args: []string{"foo.go", "bar.go"},
+ want: result{CreatePkgs: []loader.PkgSpec{{
+ Filenames: []string{"foo.go", "bar.go"},
+ }}},
+ },
+ // Mixture of *.go and import paths.
+ {
+ args: []string{"foo.go", "fmt"},
+ want: result{
+ Err: "named files must be .go files: fmt",
+ },
+ },
+ } {
+ var conf loader.Config
+ rest, err := conf.FromArgs(test.args, test.tests)
+ got := result{
+ Rest: rest,
+ ImportPkgs: conf.ImportPkgs,
+ CreatePkgs: conf.CreatePkgs,
+ }
+ if err != nil {
+ got.Err = err.Error()
+ }
+ if !reflect.DeepEqual(got, test.want) {
+ t.Errorf("FromArgs(%q) = %+v, want %+v", test.args, got, test.want)
+ }
+ }
+}
+
+func TestLoad_NoInitialPackages(t *testing.T) {
+ var conf loader.Config
+
+ const wantErr = "no initial packages were loaded"
+
+ prog, err := conf.Load()
+ if err == nil {
+ t.Errorf("Load succeeded unexpectedly, want %q", wantErr)
+ } else if err.Error() != wantErr {
+ t.Errorf("Load failed with wrong error %q, want %q", err, wantErr)
+ }
+ if prog != nil {
+ t.Errorf("Load unexpectedly returned a Program")
+ }
+}
+
+func TestLoad_MissingInitialPackage(t *testing.T) {
+ var conf loader.Config
+ conf.Import("nosuchpkg")
+ conf.Import("errors")
+
+ const wantErr = "couldn't load packages due to errors: nosuchpkg"
+
+ prog, err := conf.Load()
+ if err == nil {
+ t.Errorf("Load succeeded unexpectedly, want %q", wantErr)
+ } else if err.Error() != wantErr {
+ t.Errorf("Load failed with wrong error %q, want %q", err, wantErr)
+ }
+ if prog != nil {
+ t.Errorf("Load unexpectedly returned a Program")
+ }
+}
+
+func TestLoad_MissingInitialPackage_AllowErrors(t *testing.T) {
+ var conf loader.Config
+ conf.AllowErrors = true
+ conf.Import("nosuchpkg")
+ conf.ImportWithTests("errors")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("Load failed unexpectedly: %v", err)
+ }
+ if prog == nil {
+ t.Fatalf("Load returned a nil Program")
+ }
+ if got, want := created(prog), "errors_test"; got != want {
+ t.Errorf("Created = %s, want %s", got, want)
+ }
+ if got, want := imported(prog), "errors"; got != want {
+ t.Errorf("Imported = %s, want %s", got, want)
+ }
+}
+
+func TestCreateUnnamedPackage(t *testing.T) {
+ var conf loader.Config
+ conf.CreateFromFilenames("")
+ prog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load failed: %v", err)
+ }
+ if got, want := fmt.Sprint(prog.InitialPackages()), "[(unnamed)]"; got != want {
+ t.Errorf("InitialPackages = %s, want %s", got, want)
+ }
+}
+
+func TestLoad_MissingFileInCreatedPackage(t *testing.T) {
+ var conf loader.Config
+ conf.CreateFromFilenames("", "missing.go")
+
+ const wantErr = "couldn't load packages due to errors: (unnamed)"
+
+ prog, err := conf.Load()
+ if prog != nil {
+ t.Errorf("Load unexpectedly returned a Program")
+ }
+ if err == nil {
+ t.Fatalf("Load succeeded unexpectedly, want %q", wantErr)
+ }
+ if err.Error() != wantErr {
+ t.Fatalf("Load failed with wrong error %q, want %q", err, wantErr)
+ }
+}
+
+func TestLoad_MissingFileInCreatedPackage_AllowErrors(t *testing.T) {
+ conf := loader.Config{AllowErrors: true}
+ conf.CreateFromFilenames("", "missing.go")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("Load failed: %v", err)
+ }
+ if got, want := fmt.Sprint(prog.InitialPackages()), "[(unnamed)]"; got != want {
+ t.Fatalf("InitialPackages = %s, want %s", got, want)
+ }
+}
+
+func TestLoad_ParseError(t *testing.T) {
+ var conf loader.Config
+ conf.CreateFromFilenames("badpkg", "testdata/badpkgdecl.go")
+
+ const wantErr = "couldn't load packages due to errors: badpkg"
+
+ prog, err := conf.Load()
+ if prog != nil {
+ t.Errorf("Load unexpectedly returned a Program")
+ }
+ if err == nil {
+ t.Fatalf("Load succeeded unexpectedly, want %q", wantErr)
+ }
+ if err.Error() != wantErr {
+ t.Fatalf("Load failed with wrong error %q, want %q", err, wantErr)
+ }
+}
+
+func TestLoad_ParseError_AllowErrors(t *testing.T) {
+ var conf loader.Config
+ conf.AllowErrors = true
+ conf.CreateFromFilenames("badpkg", "testdata/badpkgdecl.go")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("Load failed unexpectedly: %v", err)
+ }
+ if prog == nil {
+ t.Fatalf("Load returned a nil Program")
+ }
+ if got, want := created(prog), "badpkg"; got != want {
+ t.Errorf("Created = %s, want %s", got, want)
+ }
+
+ badpkg := prog.Created[0]
+ if len(badpkg.Files) != 1 {
+ t.Errorf("badpkg has %d files, want 1", len(badpkg.Files))
+ }
+ wantErr := filepath.Join("testdata", "badpkgdecl.go") + ":1:34: expected 'package', found 'EOF'"
+ if !hasError(badpkg.Errors, wantErr) {
+ t.Errorf("badpkg.Errors = %v, want %s", badpkg.Errors, wantErr)
+ }
+}
+
+func TestLoad_FromSource_Success(t *testing.T) {
+ var conf loader.Config
+ conf.CreateFromFilenames("P", "testdata/a.go", "testdata/b.go")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("Load failed unexpectedly: %v", err)
+ }
+ if prog == nil {
+ t.Fatalf("Load returned a nil Program")
+ }
+ if got, want := created(prog), "P"; got != want {
+ t.Errorf("Created = %s, want %s", got, want)
+ }
+}
+
+func TestLoad_FromImports_Success(t *testing.T) {
+ var conf loader.Config
+ conf.ImportWithTests("fmt")
+ conf.ImportWithTests("errors")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("Load failed unexpectedly: %v", err)
+ }
+ if prog == nil {
+ t.Fatalf("Load returned a nil Program")
+ }
+ if got, want := created(prog), "errors_test fmt_test"; got != want {
+ t.Errorf("Created = %q, want %s", got, want)
+ }
+ if got, want := imported(prog), "errors fmt"; got != want {
+ t.Errorf("Imported = %s, want %s", got, want)
+ }
+ // Check set of transitive packages.
+ // There are >30 and the set may grow over time, so only check a few.
+ want := map[string]bool{
+ "strings": true,
+ "time": true,
+ "runtime": true,
+ "testing": true,
+ "unicode": true,
+ }
+ for _, path := range all(prog) {
+ delete(want, path)
+ }
+ if len(want) > 0 {
+ t.Errorf("AllPackages is missing these keys: %q", keys(want))
+ }
+}
+
+func TestLoad_MissingIndirectImport(t *testing.T) {
+ pkgs := map[string]string{
+ "a": `package a; import _ "b"`,
+ "b": `package b; import _ "c"`,
+ }
+ conf := loader.Config{Build: fakeContext(pkgs)}
+ conf.Import("a")
+
+ const wantErr = "couldn't load packages due to errors: b"
+
+ prog, err := conf.Load()
+ if err == nil {
+ t.Errorf("Load succeeded unexpectedly, want %q", wantErr)
+ } else if err.Error() != wantErr {
+ t.Errorf("Load failed with wrong error %q, want %q", err, wantErr)
+ }
+ if prog != nil {
+ t.Errorf("Load unexpectedly returned a Program")
+ }
+}
+
+func TestLoad_BadDependency_AllowErrors(t *testing.T) {
+ for _, test := range []struct {
+ descr string
+ pkgs map[string]string
+ wantPkgs string
+ }{
+
+ {
+ descr: "missing dependency",
+ pkgs: map[string]string{
+ "a": `package a; import _ "b"`,
+ "b": `package b; import _ "c"`,
+ },
+ wantPkgs: "a b",
+ },
+ {
+ descr: "bad package decl in dependency",
+ pkgs: map[string]string{
+ "a": `package a; import _ "b"`,
+ "b": `package b; import _ "c"`,
+ "c": `package`,
+ },
+ wantPkgs: "a b",
+ },
+ {
+ descr: "parse error in dependency",
+ pkgs: map[string]string{
+ "a": `package a; import _ "b"`,
+ "b": `package b; import _ "c"`,
+ "c": `package c; var x = `,
+ },
+ wantPkgs: "a b c",
+ },
+ } {
+ conf := loader.Config{
+ AllowErrors: true,
+ Build: fakeContext(test.pkgs),
+ }
+ conf.Import("a")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("%s: Load failed unexpectedly: %v", test.descr, err)
+ }
+ if prog == nil {
+ t.Fatalf("%s: Load returned a nil Program", test.descr)
+ }
+
+ if got, want := imported(prog), "a"; got != want {
+ t.Errorf("%s: Imported = %s, want %s", test.descr, got, want)
+ }
+ if got := all(prog); strings.Join(got, " ") != test.wantPkgs {
+ t.Errorf("%s: AllPackages = %s, want %s", test.descr, got, test.wantPkgs)
+ }
+ }
+}
+
+func TestCwd(t *testing.T) {
+ ctxt := fakeContext(map[string]string{"one/two/three": `package three`})
+ for _, test := range []struct {
+ cwd, arg, want string
+ }{
+ {cwd: "/go/src/one", arg: "./two/three", want: "one/two/three"},
+ {cwd: "/go/src/one", arg: "../one/two/three", want: "one/two/three"},
+ {cwd: "/go/src/one", arg: "one/two/three", want: "one/two/three"},
+ {cwd: "/go/src/one/two/three", arg: ".", want: "one/two/three"},
+ {cwd: "/go/src/one", arg: "two/three", want: ""},
+ } {
+ conf := loader.Config{
+ Cwd: test.cwd,
+ Build: ctxt,
+ }
+ conf.Import(test.arg)
+
+ var got string
+ prog, err := conf.Load()
+ if prog != nil {
+ got = imported(prog)
+ }
+ if got != test.want {
+ t.Errorf("Load(%s) from %s: Imported = %s, want %s",
+ test.arg, test.cwd, got, test.want)
+ if err != nil {
+ t.Errorf("Load failed: %v", err)
+ }
+ }
+ }
+}
+
+func TestLoad_vendor(t *testing.T) {
+ pkgs := map[string]string{
+ "a": `package a; import _ "x"`,
+ "a/vendor": ``, // mkdir a/vendor
+ "a/vendor/x": `package xa`,
+ "b": `package b; import _ "x"`,
+ "b/vendor": ``, // mkdir b/vendor
+ "b/vendor/x": `package xb`,
+ "c": `package c; import _ "x"`,
+ "x": `package xc`,
+ }
+ conf := loader.Config{Build: fakeContext(pkgs)}
+ conf.Import("a")
+ conf.Import("b")
+ conf.Import("c")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // Check that a, b, and c see different versions of x.
+ for _, r := range "abc" {
+ name := string(r)
+ got := prog.Package(name).Pkg.Imports()[0]
+ want := "x" + name
+ if got.Name() != want {
+ t.Errorf("package %s import %q = %s, want %s",
+ name, "x", got.Name(), want)
+ }
+ }
+}
+
+func TestVendorCwd(t *testing.T) {
+ // Test the interaction of cwd and vendor directories.
+ ctxt := fakeContext(map[string]string{
+ "net": ``, // mkdir net
+ "net/http": `package http; import _ "hpack"`,
+ "vendor": ``, // mkdir vendor
+ "vendor/hpack": `package vendorhpack`,
+ "hpack": `package hpack`,
+ })
+ for i, test := range []struct {
+ cwd, arg, want string
+ }{
+ {cwd: "/go/src/net", arg: "http"}, // not found
+ {cwd: "/go/src/net", arg: "./http", want: "net/http vendor/hpack"},
+ {cwd: "/go/src/net", arg: "hpack", want: "vendor/hpack"},
+ {cwd: "/go/src/vendor", arg: "hpack", want: "vendor/hpack"},
+ {cwd: "/go/src/vendor", arg: "./hpack", want: "vendor/hpack"},
+ } {
+ conf := loader.Config{
+ Cwd: test.cwd,
+ Build: ctxt,
+ }
+ conf.Import(test.arg)
+
+ var got string
+ prog, err := conf.Load()
+ if prog != nil {
+ got = strings.Join(all(prog), " ")
+ }
+ if got != test.want {
+ t.Errorf("#%d: Load(%s) from %s: got %s, want %s",
+ i, test.arg, test.cwd, got, test.want)
+ if err != nil {
+ t.Errorf("Load failed: %v", err)
+ }
+ }
+ }
+}
+
+func TestVendorCwdIssue16580(t *testing.T) {
+ // Regression test for Go issue 16580.
+ // Import decls in "created" packages were vendor-resolved
+ // w.r.t. cwd, not the parent directory of the package's files.
+ ctxt := fakeContext(map[string]string{
+ "a": ``, // mkdir a
+ "a/vendor": ``, // mkdir a/vendor
+ "a/vendor/b": `package b; const X = true`,
+ "b": `package b; const X = false`,
+ })
+ for _, test := range []struct {
+ filename, cwd string
+ want bool // expected value of b.X; depends on filename, not on cwd
+ }{
+ {filename: "c.go", cwd: "/go/src", want: false},
+ {filename: "c.go", cwd: "/go/src/a", want: false},
+ {filename: "c.go", cwd: "/go/src/a/b", want: false},
+ {filename: "c.go", cwd: "/go/src/a/vendor/b", want: false},
+
+ {filename: "/go/src/a/c.go", cwd: "/go/src", want: true},
+ {filename: "/go/src/a/c.go", cwd: "/go/src/a", want: true},
+ {filename: "/go/src/a/c.go", cwd: "/go/src/a/b", want: true},
+ {filename: "/go/src/a/c.go", cwd: "/go/src/a/vendor/b", want: true},
+
+ {filename: "/go/src/c/c.go", cwd: "/go/src", want: false},
+ {filename: "/go/src/c/c.go", cwd: "/go/src/a", want: false},
+ {filename: "/go/src/c/c.go", cwd: "/go/src/a/b", want: false},
+ {filename: "/go/src/c/c.go", cwd: "/go/src/a/vendor/b", want: false},
+ } {
+ conf := loader.Config{
+ Cwd: test.cwd,
+ Build: ctxt,
+ }
+ f, err := conf.ParseFile(test.filename, `package dummy; import "b"; const X = b.X`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ conf.CreateFromFiles("dummy", f)
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("%+v: Load failed: %v", test, err)
+ continue
+ }
+
+ x := constant.BoolVal(prog.Created[0].Pkg.Scope().Lookup("X").(*types.Const).Val())
+ if x != test.want {
+ t.Errorf("%+v: b.X = %t", test, x)
+ }
+ }
+
+ // TODO(adonovan): also test imports within XTestGoFiles.
+}
+
+// TODO(adonovan): more Load tests:
+//
+// failures:
+// - to parse package decl of *_test.go files
+// - to parse package decl of external *_test.go files
+// - to parse whole of *_test.go files
+// - to parse whole of external *_test.go files
+// - to open a *.go file during import scanning
+// - to import from binary
+
+// features:
+// - InitialPackages
+// - PackageCreated hook
+// - TypeCheckFuncBodies hook
+
+func TestTransitivelyErrorFreeFlag(t *testing.T) {
+ // Create a minimal custom build.Context
+ // that fakes the following packages:
+ //
+ // a --> b --> c!       c has an error
+ //   \                  d and e are transitively error-free.
+ //    e --> d
+ //
+ // Each package [a-e] consists of one file, x.go.
+ pkgs := map[string]string{
+ "a": `package a; import (_ "b"; _ "e")`,
+ "b": `package b; import _ "c"`,
+ "c": `package c; func f() { _ = int(false) }`, // type error within function body
+ "d": `package d;`,
+ "e": `package e; import _ "d"`,
+ }
+ conf := loader.Config{
+ AllowErrors: true,
+ Build: fakeContext(pkgs),
+ }
+ conf.Import("a")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("Load failed: %s", err)
+ }
+ if prog == nil {
+ t.Fatalf("Load returned nil *Program")
+ }
+
+ for pkg, info := range prog.AllPackages {
+ var wantErr, wantTEF bool
+ switch pkg.Path() {
+ case "a", "b":
+ case "c":
+ wantErr = true
+ case "d", "e":
+ wantTEF = true
+ default:
+ t.Errorf("unexpected package: %q", pkg.Path())
+ continue
+ }
+
+ if (info.Errors != nil) != wantErr {
+ if wantErr {
+ t.Errorf("Package %q.Errors = nil, want error", pkg.Path())
+ } else {
+ t.Errorf("Package %q has unexpected Errors: %v",
+ pkg.Path(), info.Errors)
+ }
+ }
+
+ if info.TransitivelyErrorFree != wantTEF {
+ t.Errorf("Package %q.TransitivelyErrorFree=%t, want %t",
+ pkg.Path(), info.TransitivelyErrorFree, wantTEF)
+ }
+ }
+}
+
+// Test that syntax (scan/parse), type, and loader errors are recorded
+// (in PackageInfo.Errors) and reported (via Config.TypeChecker.Error).
+func TestErrorReporting(t *testing.T) {
+ pkgs := map[string]string{
+ "a": `package a; import (_ "b"; _ "c"); var x int = false`,
+ "b": `package b; 'syntax error!`,
+ }
+ conf := loader.Config{
+ AllowErrors: true,
+ Build: fakeContext(pkgs),
+ }
+ var mu sync.Mutex
+ var allErrors []error
+ conf.TypeChecker.Error = func(err error) {
+ mu.Lock()
+ allErrors = append(allErrors, err)
+ mu.Unlock()
+ }
+ conf.Import("a")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("Load failed: %s", err)
+ }
+ if prog == nil {
+ t.Fatalf("Load returned nil *Program")
+ }
+
+ // TODO(adonovan): test keys of ImportMap.
+
+ // Check errors recorded in each PackageInfo.
+ for pkg, info := range prog.AllPackages {
+ switch pkg.Path() {
+ case "a":
+ if !hasError(info.Errors, "cannot convert false") {
+ t.Errorf("a.Errors = %v, want bool conversion (type) error", info.Errors)
+ }
+ if !hasError(info.Errors, "could not import c") {
+ t.Errorf("a.Errors = %v, want import (loader) error", info.Errors)
+ }
+ case "b":
+ if !hasError(info.Errors, "rune literal not terminated") {
+ t.Errorf("b.Errors = %v, want unterminated literal (syntax) error", info.Errors)
+ }
+ }
+ }
+
+ // Check errors reported via error handler.
+ if !hasError(allErrors, "cannot convert false") ||
+ !hasError(allErrors, "rune literal not terminated") ||
+ !hasError(allErrors, "could not import c") {
+ t.Errorf("allErrors = %v, want syntax, type and loader errors", allErrors)
+ }
+}
+
+func TestCycles(t *testing.T) {
+ for _, test := range []struct {
+ descr string
+ ctxt *build.Context
+ wantErr string
+ }{
+ {
+ "self-cycle",
+ fakeContext(map[string]string{
+ "main": `package main; import _ "selfcycle"`,
+ "selfcycle": `package selfcycle; import _ "selfcycle"`,
+ }),
+ `import cycle: selfcycle -> selfcycle`,
+ },
+ {
+ "three-package cycle",
+ fakeContext(map[string]string{
+ "main": `package main; import _ "a"`,
+ "a": `package a; import _ "b"`,
+ "b": `package b; import _ "c"`,
+ "c": `package c; import _ "a"`,
+ }),
+ `import cycle: c -> a -> b -> c`,
+ },
+ {
+ "self-cycle in dependency of test file",
+ buildutil.FakeContext(map[string]map[string]string{
+ "main": {
+ "main.go": `package main`,
+ "main_test.go": `package main; import _ "a"`,
+ },
+ "a": {
+ "a.go": `package a; import _ "a"`,
+ },
+ }),
+ `import cycle: a -> a`,
+ },
+ // TODO(adonovan): fix: these fail
+ // {
+ // "two-package cycle in dependency of test file",
+ // buildutil.FakeContext(map[string]map[string]string{
+ // "main": {
+ // "main.go": `package main`,
+ // "main_test.go": `package main; import _ "a"`,
+ // },
+ // "a": {
+ // "a.go": `package a; import _ "main"`,
+ // },
+ // }),
+ // `import cycle: main -> a -> main`,
+ // },
+ // {
+ // "self-cycle in augmented package",
+ // buildutil.FakeContext(map[string]map[string]string{
+ // "main": {
+ // "main.go": `package main`,
+ // "main_test.go": `package main; import _ "main"`,
+ // },
+ // }),
+ // `import cycle: main -> main`,
+ // },
+ } {
+ conf := loader.Config{
+ AllowErrors: true,
+ Build: test.ctxt,
+ }
+ var mu sync.Mutex
+ var allErrors []error
+ conf.TypeChecker.Error = func(err error) {
+ mu.Lock()
+ allErrors = append(allErrors, err)
+ mu.Unlock()
+ }
+ conf.ImportWithTests("main")
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("%s: Load failed: %s", test.descr, err)
+ }
+ if prog == nil {
+ t.Fatalf("%s: Load returned nil *Program", test.descr)
+ }
+
+ if !hasError(allErrors, test.wantErr) {
+ t.Errorf("%s: Load() errors = %q, want %q",
+ test.descr, allErrors, test.wantErr)
+ }
+ }
+
+ // TODO(adonovan):
+ // - Test that in a legal test cycle, none of the symbols
+ // defined by augmentation are visible via import.
+}
+
+// ---- utilities ----
+
+// Simplifying wrapper around buildutil.FakeContext for single-file packages.
+func fakeContext(pkgs map[string]string) *build.Context {
+ pkgs2 := make(map[string]map[string]string)
+ for path, content := range pkgs {
+ pkgs2[path] = map[string]string{"x.go": content}
+ }
+ return buildutil.FakeContext(pkgs2)
+}
+
+func hasError(errors []error, substr string) bool {
+ for _, err := range errors {
+ if strings.Contains(err.Error(), substr) {
+ return true
+ }
+ }
+ return false
+}
+
+func keys(m map[string]bool) (keys []string) {
+ for key := range m {
+ keys = append(keys, key)
+ }
+ sort.Strings(keys)
+ return
+}
+
+// Returns all loaded packages.
+func all(prog *loader.Program) []string {
+ var pkgs []string
+ for _, info := range prog.AllPackages {
+ pkgs = append(pkgs, info.Pkg.Path())
+ }
+ sort.Strings(pkgs)
+ return pkgs
+}
+
+// Returns initially imported packages, as a string.
+func imported(prog *loader.Program) string {
+ var pkgs []string
+ for _, info := range prog.Imported {
+ pkgs = append(pkgs, info.Pkg.Path())
+ }
+ sort.Strings(pkgs)
+ return strings.Join(pkgs, " ")
+}
+
+// Returns initially created packages, as a string.
+func created(prog *loader.Program) string {
+ var pkgs []string
+ for _, info := range prog.Created {
+ pkgs = append(pkgs, info.Pkg.Path())
+ }
+ return strings.Join(pkgs, " ")
+}
+
+// Load package "io" twice in parallel.
+// When run with -race, this is a regression test for Go issue 20718, in
+// which the global "unsafe" package was modified concurrently.
+func TestLoad1(t *testing.T) { loadIO(t) }
+func TestLoad2(t *testing.T) { loadIO(t) }
+
+func loadIO(t *testing.T) {
+ t.Parallel()
+ conf := &loader.Config{ImportPkgs: map[string]bool{"io": false}}
+ if _, err := conf.Load(); err != nil {
+ t.Fatal(err)
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/loader/stdlib_test.go b/vendor/golang.org/x/tools/go/loader/stdlib_test.go
new file mode 100644
index 0000000..2cd066f
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/stdlib_test.go
@@ -0,0 +1,201 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package loader_test
+
+// This file enumerates all packages beneath $GOROOT, loads them, plus
+// their external tests if any, runs the type checker on them, and
+// prints some summary information.
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/token"
+ "go/types"
+ "io/ioutil"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "testing"
+ "time"
+
+ "golang.org/x/tools/go/buildutil"
+ "golang.org/x/tools/go/loader"
+)
+
+func TestStdlib(t *testing.T) {
+ if runtime.GOOS == "android" {
+ t.Skipf("incomplete std lib on %s", runtime.GOOS)
+ }
+ if testing.Short() {
+ t.Skip("skipping in short mode; uses tons of memory (golang.org/issue/14113)")
+ }
+
+ runtime.GC()
+ t0 := time.Now()
+ var memstats runtime.MemStats
+ runtime.ReadMemStats(&memstats)
+ alloc := memstats.Alloc
+
+ // Load, parse and type-check the program.
+ ctxt := build.Default // copy
+ ctxt.GOPATH = "" // disable GOPATH
+ conf := loader.Config{Build: &ctxt}
+ for _, path := range buildutil.AllPackages(conf.Build) {
+ conf.ImportWithTests(path)
+ }
+
+ prog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load failed: %v", err)
+ }
+
+ t1 := time.Now()
+ runtime.GC()
+ runtime.ReadMemStats(&memstats)
+
+ numPkgs := len(prog.AllPackages)
+ if want := 205; numPkgs < want {
+ t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
+ }
+
+ // Dump package members.
+ if false {
+ for pkg := range prog.AllPackages {
+ fmt.Printf("Package %s:\n", pkg.Path())
+ scope := pkg.Scope()
+ qualifier := types.RelativeTo(pkg)
+ for _, name := range scope.Names() {
+ if ast.IsExported(name) {
+ fmt.Printf("\t%s\n", types.ObjectString(scope.Lookup(name), qualifier))
+ }
+ }
+ fmt.Println()
+ }
+ }
+
+ // Check that Test functions for io/ioutil, regexp and
+ // compress/bzip2 are all simultaneously present.
+ // (The apparent cycle formed when augmenting all three of
+ // these packages by their tests was the original motivation
+ // for reporting b/7114.)
+ //
+ // compress/bzip2.TestBitReader in bzip2_test.go imports io/ioutil
+ // io/ioutil.TestTempFile in tempfile_test.go imports regexp
+ // regexp.TestRE2Search in exec_test.go imports compress/bzip2
+ for _, test := range []struct{ pkg, fn string }{
+ {"io/ioutil", "TestTempFile"},
+ {"regexp", "TestRE2Search"},
+ {"compress/bzip2", "TestBitReader"},
+ } {
+ info := prog.Imported[test.pkg]
+ if info == nil {
+ t.Errorf("failed to load package %q", test.pkg)
+ continue
+ }
+ obj, _ := info.Pkg.Scope().Lookup(test.fn).(*types.Func)
+ if obj == nil {
+ t.Errorf("package %q has no func %q", test.pkg, test.fn)
+ continue
+ }
+ }
+
+ // Dump some statistics.
+
+ // determine line count
+ var lineCount int
+ prog.Fset.Iterate(func(f *token.File) bool {
+ lineCount += f.LineCount()
+ return true
+ })
+
+ t.Log("GOMAXPROCS: ", runtime.GOMAXPROCS(0))
+ t.Log("#Source lines: ", lineCount)
+ t.Log("Load/parse/typecheck: ", t1.Sub(t0))
+ t.Log("#MB: ", int64(memstats.Alloc-alloc)/1000000)
+}
+
+func TestCgoOption(t *testing.T) {
+ if testing.Short() {
+ t.Skip("skipping in short mode; uses tons of memory (golang.org/issue/14113)")
+ }
+ switch runtime.GOOS {
+ // On these systems, the net and os/user packages don't use cgo
+ // or the std library is incomplete (Android).
+ case "android", "plan9", "solaris", "windows":
+ t.Skipf("no cgo or incomplete std lib on %s", runtime.GOOS)
+ }
+ // In nocgo builds (e.g. linux-amd64-nocgo),
+ // there is no "runtime/cgo" package,
+ // so cgo-generated Go files will have a failing import.
+ if !build.Default.CgoEnabled {
+ return
+ }
+ // Test that we can load cgo-using packages with
+ // CGO_ENABLED=[01], which causes go/build to select pure
+ // Go/native implementations, respectively, based on build
+ // tags.
+ //
+ // Each entry specifies a package-level object and the generic
+ // file expected to define it when cgo is disabled.
+ // When cgo is enabled, the exact file is not specified (since
+ // it varies by platform), but must differ from the generic one.
+ //
+ // The test also loads the actual file to verify that the
+ // object is indeed defined at that location.
+ for _, test := range []struct {
+ pkg, name, genericFile string
+ }{
+ {"net", "cgoLookupHost", "cgo_stub.go"},
+ {"os/user", "current", "lookup_stubs.go"},
+ } {
+ ctxt := build.Default
+ for _, ctxt.CgoEnabled = range []bool{false, true} {
+ conf := loader.Config{Build: &ctxt}
+ conf.Import(test.pkg)
+ prog, err := conf.Load()
+ if err != nil {
+ t.Errorf("Load failed: %v", err)
+ continue
+ }
+ info := prog.Imported[test.pkg]
+ if info == nil {
+ t.Errorf("package %s not found", test.pkg)
+ continue
+ }
+ obj := info.Pkg.Scope().Lookup(test.name)
+ if obj == nil {
+ t.Errorf("no object %s.%s", test.pkg, test.name)
+ continue
+ }
+ posn := prog.Fset.Position(obj.Pos())
+ t.Logf("%s: %s (CgoEnabled=%t)", posn, obj, ctxt.CgoEnabled)
+
+ gotFile := filepath.Base(posn.Filename)
+ filesMatch := gotFile == test.genericFile
+
+ if ctxt.CgoEnabled && filesMatch {
+ t.Errorf("CGO_ENABLED=1: %s found in %s, want native file",
+ obj, gotFile)
+ } else if !ctxt.CgoEnabled && !filesMatch {
+ t.Errorf("CGO_ENABLED=0: %s found in %s, want %s",
+ obj, gotFile, test.genericFile)
+ }
+
+ // Load the file and check the object is declared at the right place.
+ b, err := ioutil.ReadFile(posn.Filename)
+ if err != nil {
+ t.Errorf("can't read %s: %s", posn.Filename, err)
+ continue
+ }
+ line := string(bytes.Split(b, []byte("\n"))[posn.Line-1])
+ ident := line[posn.Column-1:]
+ if !strings.HasPrefix(ident, test.name) {
+ t.Errorf("%s: %s not declared here (looking at %q)", posn, obj, ident)
+ }
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/loader/testdata/a.go b/vendor/golang.org/x/tools/go/loader/testdata/a.go
new file mode 100644
index 0000000..bae3955
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/testdata/a.go
@@ -0,0 +1 @@
+package P
diff --git a/vendor/golang.org/x/tools/go/loader/testdata/b.go b/vendor/golang.org/x/tools/go/loader/testdata/b.go
new file mode 100644
index 0000000..bae3955
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/testdata/b.go
@@ -0,0 +1 @@
+package P
diff --git a/vendor/golang.org/x/tools/go/loader/testdata/badpkgdecl.go b/vendor/golang.org/x/tools/go/loader/testdata/badpkgdecl.go
new file mode 100644
index 0000000..1e39359
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/testdata/badpkgdecl.go
@@ -0,0 +1 @@
+// this file has no package decl
diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go
new file mode 100644
index 0000000..7f38dd7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/util.go
@@ -0,0 +1,124 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package loader
+
+import (
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io"
+ "os"
+ "strconv"
+ "sync"
+
+ "golang.org/x/tools/go/buildutil"
+)
+
+// We use a counting semaphore to limit
+// the number of parallel I/O calls per process.
+var ioLimit = make(chan bool, 10)
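+
+// The buffered channel acts as a counting semaphore: a send acquires one of
+// the ten slots and a receive releases it. A minimal sketch of the idiom
+// (doIO is a placeholder):
+//
+//	ioLimit <- true // acquire; blocks while ten calls are in flight
+//	doIO()
+//	<-ioLimit // release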
+
+// parseFiles parses the Go source files within directory dir and
+// returns the ASTs of the ones that could be at least partially parsed,
+// along with a list of I/O and parse errors encountered.
+//
+// I/O is done via ctxt, which may specify a virtual file system.
+// displayPath is used to transform the filenames attached to the ASTs.
+//
+func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
+ if displayPath == nil {
+ displayPath = func(path string) string { return path }
+ }
+ var wg sync.WaitGroup
+ n := len(files)
+ parsed := make([]*ast.File, n)
+ errors := make([]error, n)
+ for i, file := range files {
+ if !buildutil.IsAbsPath(ctxt, file) {
+ file = buildutil.JoinPath(ctxt, dir, file)
+ }
+ wg.Add(1)
+ go func(i int, file string) {
+ ioLimit <- true // wait
+ defer func() {
+ wg.Done()
+ <-ioLimit // signal
+ }()
+ var rd io.ReadCloser
+ var err error
+ if ctxt.OpenFile != nil {
+ rd, err = ctxt.OpenFile(file)
+ } else {
+ rd, err = os.Open(file)
+ }
+ if err != nil {
+ errors[i] = err // open failed
+ return
+ }
+
+ // ParseFile may return both an AST and an error.
+ parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode)
+ rd.Close()
+ }(i, file)
+ }
+ wg.Wait()
+
+ // Eliminate nils, preserving order.
+ var o int
+ for _, f := range parsed {
+ if f != nil {
+ parsed[o] = f
+ o++
+ }
+ }
+ parsed = parsed[:o]
+
+ o = 0
+ for _, err := range errors {
+ if err != nil {
+ errors[o] = err
+ o++
+ }
+ }
+ errors = errors[:o]
+
+ return parsed, errors
+}
+
+// scanImports returns the set of all import paths from all
+// import specs in the specified files.
+func scanImports(files []*ast.File) map[string]bool {
+ imports := make(map[string]bool)
+ for _, f := range files {
+ for _, decl := range f.Decls {
+ if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
+ for _, spec := range decl.Specs {
+ spec := spec.(*ast.ImportSpec)
+
+ // NB: do not assume the program is well-formed!
+ path, err := strconv.Unquote(spec.Path.Value)
+ if err != nil {
+ continue // quietly ignore the error
+ }
+ if path == "C" {
+ continue // skip pseudopackage
+ }
+ imports[path] = true
+ }
+ }
+ }
+ }
+ return imports
+}
+
+// ---------- Internal helpers ----------
+
+// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
+func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
+ p := int(pos)
+ base := f.Base()
+ return base <= p && p < base+f.Size()
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/blockopt.go b/vendor/golang.org/x/tools/go/ssa/blockopt.go
new file mode 100644
index 0000000..e79260a
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/blockopt.go
@@ -0,0 +1,187 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// Simple block optimizations to simplify the control flow graph.
+
+// TODO(adonovan): opt: instead of creating several "unreachable" blocks
+// per function in the Builder, reuse a single one (e.g. at Blocks[1])
+// to reduce garbage.
+
+import (
+ "fmt"
+ "os"
+)
+
+// If true, perform sanity checking and show progress at each
+// successive iteration of optimizeBlocks. Very verbose.
+const debugBlockOpt = false
+
+// markReachable sets Index=-1 for all blocks reachable from b.
+func markReachable(b *BasicBlock) {
+ b.Index = -1
+ for _, succ := range b.Succs {
+ if succ.Index == 0 {
+ markReachable(succ)
+ }
+ }
+}
+
+// deleteUnreachableBlocks marks all reachable blocks of f and
+// eliminates (nils) all others, including possibly cyclic subgraphs.
+//
+func deleteUnreachableBlocks(f *Function) {
+ const white, black = 0, -1
+ // We borrow b.Index temporarily as the mark bit.
+ for _, b := range f.Blocks {
+ b.Index = white
+ }
+ markReachable(f.Blocks[0])
+ if f.Recover != nil {
+ markReachable(f.Recover)
+ }
+ for i, b := range f.Blocks {
+ if b.Index == white {
+ for _, c := range b.Succs {
+ if c.Index == black {
+ c.removePred(b) // delete white->black edge
+ }
+ }
+ if debugBlockOpt {
+ fmt.Fprintln(os.Stderr, "unreachable", b)
+ }
+ f.Blocks[i] = nil // delete b
+ }
+ }
+ f.removeNilBlocks()
+}
+
+// jumpThreading attempts to apply simple jump-threading to block b,
+// in which a->b->c becomes a->c if b is just a Jump.
+// The result is true if the optimization was applied.
+//
+func jumpThreading(f *Function, b *BasicBlock) bool {
+ if b.Index == 0 {
+ return false // don't apply to entry block
+ }
+ if b.Instrs == nil {
+ return false
+ }
+ if _, ok := b.Instrs[0].(*Jump); !ok {
+ return false // not just a jump
+ }
+ c := b.Succs[0]
+ if c == b {
+ return false // don't apply to degenerate jump-to-self.
+ }
+ if c.hasPhi() {
+ return false // not sound without more effort
+ }
+ for j, a := range b.Preds {
+ a.replaceSucc(b, c)
+
+ // If a now has two edges to c, replace its degenerate If by Jump.
+ if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c {
+ jump := new(Jump)
+ jump.setBlock(a)
+ a.Instrs[len(a.Instrs)-1] = jump
+ a.Succs = a.Succs[:1]
+ c.removePred(b)
+ } else {
+ if j == 0 {
+ c.replacePred(b, a)
+ } else {
+ c.Preds = append(c.Preds, a)
+ }
+ }
+
+ if debugBlockOpt {
+ fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c)
+ }
+ }
+ f.Blocks[b.Index] = nil // delete b
+ return true
+}
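+
+// For intuition (an illustrative sketch, not from the original comments):
+//
+//	before:            after jumpThreading(f, b):
+//	  a --> b --> c      a --> c
+//
+// where b holds only a Jump; b is then deleted from f.Blocks.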
+
+// fuseBlocks attempts to apply the block fusion optimization to block
+// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1.
+// The result is true if the optimization was applied.
+//
+func fuseBlocks(f *Function, a *BasicBlock) bool {
+ if len(a.Succs) != 1 {
+ return false
+ }
+ b := a.Succs[0]
+ if len(b.Preds) != 1 {
+ return false
+ }
+
+ // Degenerate &&/|| ops may result in a straight-line CFG
+ // containing φ-nodes. (Ideally we'd replace such φ-nodes with
+ // their sole operand but that requires Referrers, built later.)
+ if b.hasPhi() {
+ return false // not sound without further effort
+ }
+
+ // Eliminate jump at end of A, then copy all of B across.
+ a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...)
+ for _, instr := range b.Instrs {
+ instr.setBlock(a)
+ }
+
+ // A inherits B's successors
+ a.Succs = append(a.succs2[:0], b.Succs...)
+
+ // Fix up Preds links of all successors of B.
+ for _, c := range b.Succs {
+ c.replacePred(b, a)
+ }
+
+ if debugBlockOpt {
+ fmt.Fprintln(os.Stderr, "fuseBlocks", a, b)
+ }
+
+ f.Blocks[b.Index] = nil // delete b
+ return true
+}
+
+// optimizeBlocks() performs some simple block optimizations on a
+// completed function: dead block elimination, block fusion, jump
+// threading.
+//
+func optimizeBlocks(f *Function) {
+ deleteUnreachableBlocks(f)
+
+ // Loop until no further progress.
+ changed := true
+ for changed {
+ changed = false
+
+ if debugBlockOpt {
+ f.WriteTo(os.Stderr)
+ mustSanityCheck(f, nil)
+ }
+
+ for _, b := range f.Blocks {
+ // f.Blocks will temporarily contain nils to indicate
+ // deleted blocks; we remove them at the end.
+ if b == nil {
+ continue
+ }
+
+ // Fuse blocks. b->c becomes bc.
+ if fuseBlocks(f, b) {
+ changed = true
+ }
+
+ // a->b->c becomes a->c if b contains only a Jump.
+ if jumpThreading(f, b) {
+ changed = true
+ continue // (b was disconnected)
+ }
+ }
+ }
+ f.removeNilBlocks()
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/builder.go b/vendor/golang.org/x/tools/go/ssa/builder.go
new file mode 100644
index 0000000..44abc5b
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/builder.go
@@ -0,0 +1,2379 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file implements the BUILD phase of SSA construction.
+//
+// SSA construction has two phases, CREATE and BUILD. In the CREATE phase
+// (create.go), all packages are constructed and type-checked and
+// definitions of all package members are created, method-sets are
+// computed, and wrapper methods are synthesized.
+// ssa.Packages are created in arbitrary order.
+//
+// In the BUILD phase (builder.go), the builder traverses the AST of
+// each Go source function and generates SSA instructions for the
+// function body. Initializer expressions for package-level variables
+// are emitted to the package's init() function in the order specified
+// by go/types.Info.InitOrder, then code for each function in the
+// package is generated in lexical order.
+// The BUILD phases for distinct packages are independent and are
+// executed in parallel.
+//
+// TODO(adonovan): indeed, building functions is now embarrassingly parallel.
+// Audit for concurrency then benchmark using more goroutines.
+//
+// The builder's and Program's indices (maps) are populated and
+// mutated during the CREATE phase, but during the BUILD phase they
+// remain constant. The sole exception is Prog.methodSets and its
+// related maps, which are protected by a dedicated mutex.
+
+import (
+ "fmt"
+ "go/ast"
+ exact "go/constant"
+ "go/token"
+ "go/types"
+ "os"
+ "sync"
+)
+
+type opaqueType struct {
+ types.Type
+ name string
+}
+
+func (t *opaqueType) String() string { return t.name }
+
+var (
+ varOk = newVar("ok", tBool)
+ varIndex = newVar("index", tInt)
+
+ // Type constants.
+ tBool = types.Typ[types.Bool]
+ tByte = types.Typ[types.Byte]
+ tInt = types.Typ[types.Int]
+ tInvalid = types.Typ[types.Invalid]
+ tString = types.Typ[types.String]
+ tUntypedNil = types.Typ[types.UntypedNil]
+ tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators
+ tEface = types.NewInterface(nil, nil).Complete()
+
+ // SSA Value constants.
+ vZero = intConst(0)
+ vOne = intConst(1)
+ vTrue = NewConst(exact.MakeBool(true), tBool)
+)
+
+// builder holds state associated with the package currently being built.
+// Its methods contain all the logic for AST-to-SSA conversion.
+type builder struct{}
+
+// cond emits to fn code to evaluate boolean condition e and jump
+// to t or f depending on its value, performing various simplifications.
+//
+// Postcondition: fn.currentBlock is nil.
+//
+func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
+ switch e := e.(type) {
+ case *ast.ParenExpr:
+ b.cond(fn, e.X, t, f)
+ return
+
+ case *ast.BinaryExpr:
+ switch e.Op {
+ case token.LAND:
+ ltrue := fn.newBasicBlock("cond.true")
+ b.cond(fn, e.X, ltrue, f)
+ fn.currentBlock = ltrue
+ b.cond(fn, e.Y, t, f)
+ return
+
+ case token.LOR:
+ lfalse := fn.newBasicBlock("cond.false")
+ b.cond(fn, e.X, t, lfalse)
+ fn.currentBlock = lfalse
+ b.cond(fn, e.Y, t, f)
+ return
+ }
+
+ case *ast.UnaryExpr:
+ if e.Op == token.NOT {
+ b.cond(fn, e.X, f, t)
+ return
+ }
+ }
+
+ // A traditional compiler would simplify "if false" (etc) here
+ // but we do not, for better fidelity to the source code.
+ //
+ // The value of a constant condition may be platform-specific,
+ // and may cause blocks that are reachable in some configuration
+ // to be hidden from subsequent analyses such as bug-finding tools.
+ emitIf(fn, b.expr(fn, e), t, f)
+}
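+
+// For example, cond lowers "if a && !b { T } else { F }" into a chain of
+// branches with no materialized boolean value: it first branches on a
+// (false jumps straight to F) and then, via the NOT case above, branches
+// on b with the targets swapped, so true jumps to F and false jumps to T.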
+
+// logicalBinop emits code to fn to evaluate e, a &&- or
+// ||-expression whose reified boolean value is wanted.
+// The value is returned.
+//
+func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
+ rhs := fn.newBasicBlock("binop.rhs")
+ done := fn.newBasicBlock("binop.done")
+
+ // T(e) = T(e.X) = T(e.Y) after untyped constants have been
+ // eliminated.
+ // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
+ t := fn.Pkg.typeOf(e)
+
+ var short Value // value of the short-circuit path
+ switch e.Op {
+ case token.LAND:
+ b.cond(fn, e.X, rhs, done)
+ short = NewConst(exact.MakeBool(false), t)
+
+ case token.LOR:
+ b.cond(fn, e.X, done, rhs)
+ short = NewConst(exact.MakeBool(true), t)
+ }
+
+ // Is rhs unreachable?
+ if rhs.Preds == nil {
+ // Simplify false&&y to false, true||y to true.
+ fn.currentBlock = done
+ return short
+ }
+
+ // Is done unreachable?
+ if done.Preds == nil {
+ // Simplify true&&y (or false||y) to y.
+ fn.currentBlock = rhs
+ return b.expr(fn, e.Y)
+ }
+
+ // All edges from e.X to done carry the short-circuit value.
+ var edges []Value
+ for range done.Preds {
+ edges = append(edges, short)
+ }
+
+ // The edge from e.Y to done carries the value of e.Y.
+ fn.currentBlock = rhs
+ edges = append(edges, b.expr(fn, e.Y))
+ emitJump(fn, done)
+ fn.currentBlock = done
+
+ phi := &Phi{Edges: edges, Comment: e.Op.String()}
+ phi.pos = e.OpPos
+ phi.typ = t
+ return done.emit(phi)
+}
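+
+// For example, "v := x || y" (with neither operand constant) evaluates x
+// and branches to binop.done if it is true, otherwise to binop.rhs;
+// binop.rhs evaluates y and jumps to binop.done, where a phi merges the
+// short-circuit constant true (one edge per predecessor reached from x)
+// with the value of y.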
+
+// exprN lowers a multi-result expression e to SSA form, emitting code
+// to fn and returning a single Value whose type is a *types.Tuple.
+// The caller must access the components via Extract.
+//
+// Multi-result expressions include CallExprs in a multi-value
+// assignment or return statement, and "value,ok" uses of
+// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op
+// is token.ARROW).
+//
+func (b *builder) exprN(fn *Function, e ast.Expr) Value {
+ typ := fn.Pkg.typeOf(e).(*types.Tuple)
+ switch e := e.(type) {
+ case *ast.ParenExpr:
+ return b.exprN(fn, e.X)
+
+ case *ast.CallExpr:
+ // Currently, no built-in function or type conversion
+ // has multiple results, so we can avoid some of the
+ // cases for single-valued CallExpr.
+ var c Call
+ b.setCall(fn, e, &c.Call)
+ c.typ = typ
+ return fn.emit(&c)
+
+ case *ast.IndexExpr:
+ mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
+ lookup := &Lookup{
+ X: b.expr(fn, e.X),
+ Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
+ CommaOk: true,
+ }
+ lookup.setType(typ)
+ lookup.setPos(e.Lbrack)
+ return fn.emit(lookup)
+
+ case *ast.TypeAssertExpr:
+ return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen)
+
+ case *ast.UnaryExpr: // must be receive <-
+ unop := &UnOp{
+ Op: token.ARROW,
+ X: b.expr(fn, e.X),
+ CommaOk: true,
+ }
+ unop.setType(typ)
+ unop.setPos(e.OpPos)
+ return fn.emit(unop)
+ }
+ panic(fmt.Sprintf("exprN(%T) in %s", e, fn))
+}
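+
+// For example, each of these right-hand sides is lowered by exprN to a
+// single tuple-typed value whose components the caller accesses with
+// emitExtract:
+//
+//	v, ok := m[k]   // Lookup with CommaOk
+//	v, ok := x.(T)  // type test (TypeAssert with CommaOk)
+//	v, ok := <-ch   // UnOp(ARROW) with CommaOk
+//	a, b := f()     // Call with a tuple result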
+
+// builtin emits to fn SSA instructions to implement a call to the
+// built-in function obj with the specified arguments
+// and return type. It returns the value defined by the result.
+//
+// The result is nil if no special handling was required; in this case
+// the caller should treat this like an ordinary library function
+// call.
+//
+func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value {
+ switch obj.Name() {
+ case "make":
+ switch typ.Underlying().(type) {
+ case *types.Slice:
+ n := b.expr(fn, args[1])
+ m := n
+ if len(args) == 3 {
+ m = b.expr(fn, args[2])
+ }
+ if m, ok := m.(*Const); ok {
+ // treat make([]T, n, m) as new([m]T)[:n]
+ cap := m.Int64()
+ at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap)
+ alloc := emitNew(fn, at, pos)
+ alloc.Comment = "makeslice"
+ v := &Slice{
+ X: alloc,
+ High: n,
+ }
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+ }
+ v := &MakeSlice{
+ Len: n,
+ Cap: m,
+ }
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+
+ case *types.Map:
+ var res Value
+ if len(args) == 2 {
+ res = b.expr(fn, args[1])
+ }
+ v := &MakeMap{Reserve: res}
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+
+ case *types.Chan:
+ var sz Value = vZero
+ if len(args) == 2 {
+ sz = b.expr(fn, args[1])
+ }
+ v := &MakeChan{Size: sz}
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+ }
+
+ case "new":
+ alloc := emitNew(fn, deref(typ), pos)
+ alloc.Comment = "new"
+ return alloc
+
+ case "len", "cap":
+ // Special case: len or cap of an array or *array is
+ // based on the type, not the value which may be nil.
+ // We must still evaluate the value, though. (If it
+ // was side-effect free, the whole call would have
+ // been constant-folded.)
+ t := deref(fn.Pkg.typeOf(args[0])).Underlying()
+ if at, ok := t.(*types.Array); ok {
+ b.expr(fn, args[0]) // for effects only
+ return intConst(at.Len())
+ }
+ // Otherwise treat as normal.
+
+ case "panic":
+ fn.emit(&Panic{
+ X: emitConv(fn, b.expr(fn, args[0]), tEface),
+ pos: pos,
+ })
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+ return vTrue // any non-nil Value will do
+ }
+ return nil // treat all others as a regular function call
+}
+
+// addr lowers a single-result addressable expression e to SSA form,
+// emitting code to fn and returning the location (an lvalue) defined
+// by the expression.
+//
+// If escaping is true, addr marks the base variable of the
+// addressable expression e as being a potentially escaping pointer
+// value. For example, in this code:
+//
+// a := A{
+// b: [1]B{B{c: 1}},
+// }
+// return &a.b[0].c
+//
+// the application of & causes a.b[0].c to have its address taken,
+// which means that ultimately the local variable a must be
+// heap-allocated. This is a simple but very conservative escape
+// analysis.
+//
+// Operations forming potentially escaping pointers include:
+// - &x, including when implicit in method call or composite literals.
+// - a[:] iff a is an array (not *array)
+// - references to variables in lexically enclosing functions.
+//
+func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
+ switch e := e.(type) {
+ case *ast.Ident:
+ if isBlankIdent(e) {
+ return blank{}
+ }
+ obj := fn.Pkg.objectOf(e)
+ v := fn.Prog.packageLevelValue(obj) // var (address)
+ if v == nil {
+ v = fn.lookup(obj, escaping)
+ }
+ return &address{addr: v, pos: e.Pos(), expr: e}
+
+ case *ast.CompositeLit:
+ t := deref(fn.Pkg.typeOf(e))
+ var v *Alloc
+ if escaping {
+ v = emitNew(fn, t, e.Lbrace)
+ } else {
+ v = fn.addLocal(t, e.Lbrace)
+ }
+ v.Comment = "complit"
+ var sb storebuf
+ b.compLit(fn, v, e, true, &sb)
+ sb.emit(fn)
+ return &address{addr: v, pos: e.Lbrace, expr: e}
+
+ case *ast.ParenExpr:
+ return b.addr(fn, e.X, escaping)
+
+ case *ast.SelectorExpr:
+ sel, ok := fn.Pkg.info.Selections[e]
+ if !ok {
+ // qualified identifier
+ return b.addr(fn, e.Sel, escaping)
+ }
+ if sel.Kind() != types.FieldVal {
+ panic(sel)
+ }
+ wantAddr := true
+ v := b.receiver(fn, e.X, wantAddr, escaping, sel)
+ last := len(sel.Index()) - 1
+ return &address{
+ addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel),
+ pos: e.Sel.Pos(),
+ expr: e.Sel,
+ }
+
+ case *ast.IndexExpr:
+ var x Value
+ var et types.Type
+ switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
+ case *types.Array:
+ x = b.addr(fn, e.X, escaping).address(fn)
+ et = types.NewPointer(t.Elem())
+ case *types.Pointer: // *array
+ x = b.expr(fn, e.X)
+ et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())
+ case *types.Slice:
+ x = b.expr(fn, e.X)
+ et = types.NewPointer(t.Elem())
+ case *types.Map:
+ return &element{
+ m: b.expr(fn, e.X),
+ k: emitConv(fn, b.expr(fn, e.Index), t.Key()),
+ t: t.Elem(),
+ pos: e.Lbrack,
+ }
+ default:
+ panic("unexpected container type in IndexExpr: " + t.String())
+ }
+ v := &IndexAddr{
+ X: x,
+ Index: emitConv(fn, b.expr(fn, e.Index), tInt),
+ }
+ v.setPos(e.Lbrack)
+ v.setType(et)
+ return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e}
+
+ case *ast.StarExpr:
+ return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e}
+ }
+
+ panic(fmt.Sprintf("unexpected address expression: %T", e))
+}
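+
+// Note that the map case above yields an element lvalue rather than an
+// address: map elements are not addressable, so a later store through
+// that lvalue updates the map directly instead of writing through a
+// pointer.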
+
+type store struct {
+ lhs lvalue
+ rhs Value
+}
+
+type storebuf struct{ stores []store }
+
+func (sb *storebuf) store(lhs lvalue, rhs Value) {
+ sb.stores = append(sb.stores, store{lhs, rhs})
+}
+
+func (sb *storebuf) emit(fn *Function) {
+ for _, s := range sb.stores {
+ s.lhs.store(fn, s.rhs)
+ }
+}
+
+// assign emits to fn code to initialize the lvalue loc with the value
+// of expression e. If isZero is true, assign assumes that loc holds
+// the zero value for its type.
+//
+// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate
+// better code in some cases, e.g., for composite literals in an
+// addressable location.
+//
+// If sb is not nil, assign generates code to evaluate expression e, but
+// not to update loc. Instead, the necessary stores are appended to the
+// storebuf sb so that they can be executed later. This allows correct
+// in-place update of existing variables when the RHS is a composite
+// literal that may reference parts of the LHS.
+//
+func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) {
+ // Can we initialize it in place?
+ if e, ok := unparen(e).(*ast.CompositeLit); ok {
+ // A CompositeLit never evaluates to a pointer,
+ // so if the type of the location is a pointer,
+ // an &-operation is implied.
+ if _, ok := loc.(blank); !ok { // avoid calling blank.typ()
+ if isPointer(loc.typ()) {
+ ptr := b.addr(fn, e, true).address(fn)
+ // copy address
+ if sb != nil {
+ sb.store(loc, ptr)
+ } else {
+ loc.store(fn, ptr)
+ }
+ return
+ }
+ }
+
+ if _, ok := loc.(*address); ok {
+ if isInterface(loc.typ()) {
+ // e.g. var x interface{} = T{...}
+ // Can't in-place initialize an interface value.
+ // Fall back to copying.
+ } else {
+ // x = T{...} or x := T{...}
+ addr := loc.address(fn)
+ if sb != nil {
+ b.compLit(fn, addr, e, isZero, sb)
+ } else {
+ var sb storebuf
+ b.compLit(fn, addr, e, isZero, &sb)
+ sb.emit(fn)
+ }
+
+ // Subtle: emit debug ref for aggregate types only;
+ // slice and map are handled by store ops in compLit.
+ switch loc.typ().Underlying().(type) {
+ case *types.Struct, *types.Array:
+ emitDebugRef(fn, e, addr, true)
+ }
+
+ return
+ }
+ }
+ }
+
+ // simple case: just copy
+ rhs := b.expr(fn, e)
+ if sb != nil {
+ sb.store(loc, rhs)
+ } else {
+ loc.store(fn, rhs)
+ }
+}
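+
+// For example, for "x = T{a: x.a}" the caller supplies a storebuf, so the
+// read of x.a is emitted immediately while the store into x's field is
+// buffered in sb and executed only after all right-hand sides have been
+// evaluated.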
+
+// expr lowers a single-result expression e to SSA form, emitting code
+// to fn and returning the Value defined by the expression.
+//
+func (b *builder) expr(fn *Function, e ast.Expr) Value {
+ e = unparen(e)
+
+ tv := fn.Pkg.info.Types[e]
+
+ // Is expression a constant?
+ if tv.Value != nil {
+ return NewConst(tv.Value, tv.Type)
+ }
+
+ var v Value
+ if tv.Addressable() {
+ // Prefer pointer arithmetic ({Index,Field}Addr) followed
+ // by Load over subelement extraction (e.g. Index, Field),
+ // to avoid large copies.
+ v = b.addr(fn, e, false).load(fn)
+ } else {
+ v = b.expr0(fn, e, tv)
+ }
+ if fn.debugInfo() {
+ emitDebugRef(fn, e, v, false)
+ }
+ return v
+}
+
+func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
+ switch e := e.(type) {
+ case *ast.BasicLit:
+ panic("non-constant BasicLit") // unreachable
+
+ case *ast.FuncLit:
+ fn2 := &Function{
+ name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
+ Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature),
+ pos: e.Type.Func,
+ parent: fn,
+ Pkg: fn.Pkg,
+ Prog: fn.Prog,
+ syntax: e,
+ }
+ fn.AnonFuncs = append(fn.AnonFuncs, fn2)
+ b.buildFunction(fn2)
+ if fn2.FreeVars == nil {
+ return fn2
+ }
+ v := &MakeClosure{Fn: fn2}
+ v.setType(tv.Type)
+ for _, fv := range fn2.FreeVars {
+ v.Bindings = append(v.Bindings, fv.outer)
+ fv.outer = nil
+ }
+ return fn.emit(v)
+
+ case *ast.TypeAssertExpr: // single-result form only
+ return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen)
+
+ case *ast.CallExpr:
+ if fn.Pkg.info.Types[e.Fun].IsType() {
+ // Explicit type conversion, e.g. string(x) or big.Int(x)
+ x := b.expr(fn, e.Args[0])
+ y := emitConv(fn, x, tv.Type)
+ if y != x {
+ switch y := y.(type) {
+ case *Convert:
+ y.pos = e.Lparen
+ case *ChangeType:
+ y.pos = e.Lparen
+ case *MakeInterface:
+ y.pos = e.Lparen
+ }
+ }
+ return y
+ }
+ // Call to "intrinsic" built-ins, e.g. new, make, panic.
+ if id, ok := unparen(e.Fun).(*ast.Ident); ok {
+ if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok {
+ if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil {
+ return v
+ }
+ }
+ }
+ // Regular function call.
+ var v Call
+ b.setCall(fn, e, &v.Call)
+ v.setType(tv.Type)
+ return fn.emit(&v)
+
+ case *ast.UnaryExpr:
+ switch e.Op {
+ case token.AND: // &X --- potentially escaping.
+ addr := b.addr(fn, e.X, true)
+ if _, ok := unparen(e.X).(*ast.StarExpr); ok {
+ // &*p must panic if p is nil (http://golang.org/s/go12nil).
+ // For simplicity, we'll just (suboptimally) rely
+ // on the side effects of a load.
+ // TODO(adonovan): emit dedicated nilcheck.
+ addr.load(fn)
+ }
+ return addr.address(fn)
+ case token.ADD:
+ return b.expr(fn, e.X)
+ case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^
+ v := &UnOp{
+ Op: e.Op,
+ X: b.expr(fn, e.X),
+ }
+ v.setPos(e.OpPos)
+ v.setType(tv.Type)
+ return fn.emit(v)
+ default:
+ panic(e.Op)
+ }
+
+ case *ast.BinaryExpr:
+ switch e.Op {
+ case token.LAND, token.LOR:
+ return b.logicalBinop(fn, e)
+ case token.SHL, token.SHR:
+ fallthrough
+ case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
+ return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos)
+
+ case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
+ cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos)
+ // The type of x==y may be UntypedBool.
+ return emitConv(fn, cmp, DefaultType(tv.Type))
+ default:
+ panic("illegal op in BinaryExpr: " + e.Op.String())
+ }
+
+ case *ast.SliceExpr:
+ var low, high, max Value
+ var x Value
+ switch fn.Pkg.typeOf(e.X).Underlying().(type) {
+ case *types.Array:
+ // Potentially escaping.
+ x = b.addr(fn, e.X, true).address(fn)
+ case *types.Basic, *types.Slice, *types.Pointer: // *array
+ x = b.expr(fn, e.X)
+ default:
+ panic("unreachable")
+ }
+ if e.High != nil {
+ high = b.expr(fn, e.High)
+ }
+ if e.Low != nil {
+ low = b.expr(fn, e.Low)
+ }
+ if e.Slice3 {
+ max = b.expr(fn, e.Max)
+ }
+ v := &Slice{
+ X: x,
+ Low: low,
+ High: high,
+ Max: max,
+ }
+ v.setPos(e.Lbrack)
+ v.setType(tv.Type)
+ return fn.emit(v)
+
+ case *ast.Ident:
+ obj := fn.Pkg.info.Uses[e]
+ // Universal built-in or nil?
+ switch obj := obj.(type) {
+ case *types.Builtin:
+ return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
+ case *types.Nil:
+ return nilConst(tv.Type)
+ }
+ // Package-level func or var?
+ if v := fn.Prog.packageLevelValue(obj); v != nil {
+ if _, ok := obj.(*types.Var); ok {
+ return emitLoad(fn, v) // var (address)
+ }
+ return v // (func)
+ }
+ // Local var.
+ return emitLoad(fn, fn.lookup(obj, false)) // var (address)
+
+ case *ast.SelectorExpr:
+ sel, ok := fn.Pkg.info.Selections[e]
+ if !ok {
+ // qualified identifier
+ return b.expr(fn, e.Sel)
+ }
+ switch sel.Kind() {
+ case types.MethodExpr:
+ // (*T).f or T.f, the method f from the method-set of type T.
+ // The result is a "thunk".
+ return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type)
+
+ case types.MethodVal:
+ // e.f where e is an expression and f is a method.
+ // The result is a "bound".
+ obj := sel.Obj().(*types.Func)
+ rt := recvType(obj)
+ wantAddr := isPointer(rt)
+ escaping := true
+ v := b.receiver(fn, e.X, wantAddr, escaping, sel)
+ if isInterface(rt) {
+ // If v has interface type I,
+ // we must emit a check that v is non-nil.
+ // We use: typeassert v.(I).
+ emitTypeAssert(fn, v, rt, token.NoPos)
+ }
+ c := &MakeClosure{
+ Fn: makeBound(fn.Prog, obj),
+ Bindings: []Value{v},
+ }
+ c.setPos(e.Sel.Pos())
+ c.setType(tv.Type)
+ return fn.emit(c)
+
+ case types.FieldVal:
+ indices := sel.Index()
+ last := len(indices) - 1
+ v := b.expr(fn, e.X)
+ v = emitImplicitSelections(fn, v, indices[:last])
+ v = emitFieldSelection(fn, v, indices[last], false, e.Sel)
+ return v
+ }
+
+ panic("unexpected expression-relative selector")
+
+ case *ast.IndexExpr:
+ switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
+ case *types.Array:
+ // Non-addressable array (in a register).
+ v := &Index{
+ X: b.expr(fn, e.X),
+ Index: emitConv(fn, b.expr(fn, e.Index), tInt),
+ }
+ v.setPos(e.Lbrack)
+ v.setType(t.Elem())
+ return fn.emit(v)
+
+ case *types.Map:
+ // Maps are not addressable.
+ mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
+ v := &Lookup{
+ X: b.expr(fn, e.X),
+ Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
+ }
+ v.setPos(e.Lbrack)
+ v.setType(mapt.Elem())
+ return fn.emit(v)
+
+ case *types.Basic: // => string
+ // Strings are not addressable.
+ v := &Lookup{
+ X: b.expr(fn, e.X),
+ Index: b.expr(fn, e.Index),
+ }
+ v.setPos(e.Lbrack)
+ v.setType(tByte)
+ return fn.emit(v)
+
+ case *types.Slice, *types.Pointer: // *array
+ // Addressable slice/array; use IndexAddr and Load.
+ return b.addr(fn, e, false).load(fn)
+
+ default:
+ panic("unexpected container type in IndexExpr: " + t.String())
+ }
+
+ case *ast.CompositeLit, *ast.StarExpr:
+ // Addressable types (lvalues)
+ return b.addr(fn, e, false).load(fn)
+ }
+
+ panic(fmt.Sprintf("unexpected expr: %T", e))
+}
+
+// stmtList emits to fn code for all statements in list.
+func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
+ for _, s := range list {
+ b.stmt(fn, s)
+ }
+}
+
+// receiver emits to fn code for expression e in the "receiver"
+// position of selection e.f (where f may be a field or a method) and
+// returns the effective receiver after applying the implicit field
+// selections of sel.
+//
+// wantAddr requests that the result be an address. If
+// !sel.Indirect(), this may require that e be built in addr() mode; it
+// must thus be addressable.
+//
+// escaping is defined as per builder.addr().
+//
+func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value {
+ var v Value
+ if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) {
+ v = b.addr(fn, e, escaping).address(fn)
+ } else {
+ v = b.expr(fn, e)
+ }
+
+ last := len(sel.Index()) - 1
+ v = emitImplicitSelections(fn, v, sel.Index()[:last])
+ if !wantAddr && isPointer(v.Type()) {
+ v = emitLoad(fn, v)
+ }
+ return v
+}
+
+// setCallFunc populates the function parts of a CallCommon structure
+// (Func, Method, Recv, Args[0]) based on the kind of invocation
+// occurring in e.
+//
+func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
+ c.pos = e.Lparen
+
+ // Is this a method call?
+ if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
+ sel, ok := fn.Pkg.info.Selections[selector]
+ if ok && sel.Kind() == types.MethodVal {
+ obj := sel.Obj().(*types.Func)
+ recv := recvType(obj)
+ wantAddr := isPointer(recv)
+ escaping := true
+ v := b.receiver(fn, selector.X, wantAddr, escaping, sel)
+ if isInterface(recv) {
+ // Invoke-mode call.
+ c.Value = v
+ c.Method = obj
+ } else {
+ // "Call"-mode call.
+ c.Value = fn.Prog.declaredFunc(obj)
+ c.Args = append(c.Args, v)
+ }
+ return
+ }
+
+ // sel.Kind()==MethodExpr indicates T.f() or (*T).f():
+ // a statically dispatched call to the method f in the
+ // method-set of T or *T. T may be an interface.
+ //
+ // e.Fun would evaluate to a concrete method, interface
+ // wrapper function, or promotion wrapper.
+ //
+ // For now, we evaluate it in the usual way.
+ //
+ // TODO(adonovan): opt: inline expr() here, to make the
+ // call static and to avoid generation of wrappers.
+ // It's somewhat tricky as it may consume the first
+ // actual parameter if the call is "invoke" mode.
+ //
+ // Examples:
+ // type T struct{}; func (T) f() {} // "call" mode
+ // type T interface { f() } // "invoke" mode
+ //
+ // type S struct{ T }
+ //
+ // var s S
+ // S.f(s)
+ // (*S).f(&s)
+ //
+ // Suggested approach:
+ // - consume the first actual parameter expression
+ // and build it with b.expr().
+ // - apply implicit field selections.
+ // - use MethodVal logic to populate fields of c.
+ }
+
+ // Evaluate the function operand in the usual way.
+ c.Value = b.expr(fn, e.Fun)
+}
+
+// emitCallArgs emits to f code for the actual parameters of call e to
+// a (possibly built-in) function of effective type sig.
+// The argument values are appended to args, which is then returned.
+//
+func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
+ // f(x, y, z...): pass slice z straight through.
+ if e.Ellipsis != 0 {
+ for i, arg := range e.Args {
+ v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type())
+ args = append(args, v)
+ }
+ return args
+ }
+
+ offset := len(args) // 1 if call has receiver, 0 otherwise
+
+ // Evaluate actual parameter expressions.
+ //
+ // If this is a chained call of the form f(g()) where g has
+ // multiple return values (MRV), they are flattened out into
+ // args; a suffix of them may end up in a varargs slice.
+ for _, arg := range e.Args {
+ v := b.expr(fn, arg)
+ if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain
+ for i, n := 0, ttuple.Len(); i < n; i++ {
+ args = append(args, emitExtract(fn, v, i))
+ }
+ } else {
+ args = append(args, v)
+ }
+ }
+
+ // Actual->formal assignability conversions for normal parameters.
+ np := sig.Params().Len() // number of normal parameters
+ if sig.Variadic() {
+ np--
+ }
+ for i := 0; i < np; i++ {
+ args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type())
+ }
+
+ // Actual->formal assignability conversions for variadic parameter,
+ // and construction of slice.
+ if sig.Variadic() {
+ varargs := args[offset+np:]
+ st := sig.Params().At(np).Type().(*types.Slice)
+ vt := st.Elem()
+ if len(varargs) == 0 {
+ args = append(args, nilConst(st))
+ } else {
+ // Replace a suffix of args with a slice containing it.
+ at := types.NewArray(vt, int64(len(varargs)))
+ a := emitNew(fn, at, token.NoPos)
+ a.setPos(e.Rparen)
+ a.Comment = "varargs"
+ for i, arg := range varargs {
+ iaddr := &IndexAddr{
+ X: a,
+ Index: intConst(int64(i)),
+ }
+ iaddr.setType(types.NewPointer(vt))
+ fn.emit(iaddr)
+ emitStore(fn, iaddr, arg, arg.Pos())
+ }
+ s := &Slice{X: a}
+ s.setType(st)
+ args[offset+np] = fn.emit(s)
+ args = args[:offset+np+1]
+ }
+ }
+ return args
+}
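+
+// For example, a call f(1, 2, 3) of func f(xs ...int) allocates a [3]int
+// array (Comment "varargs"), stores each argument through an IndexAddr,
+// and passes a Slice of that array as the final argument; f(xs...) passes
+// the slice straight through, and f() passes a nil slice constant.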
+
+// setCall emits to fn code to evaluate all the parameters of a function
+// call e, and populates *c with those values.
+//
+func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
+ // First deal with the f(...) part and optional receiver.
+ b.setCallFunc(fn, e, c)
+
+ // Then append the other actual parameters.
+ sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature)
+ if sig == nil {
+ panic(fmt.Sprintf("no signature for call of %s", e.Fun))
+ }
+ c.Args = b.emitCallArgs(fn, sig, e, c.Args)
+}
+
+// assignOp emits to fn code to perform loc += incr or loc -= incr.
+func (b *builder) assignOp(fn *Function, loc lvalue, incr Value, op token.Token) {
+ oldv := loc.load(fn)
+ loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, incr, oldv.Type()), loc.typ(), token.NoPos))
+}
+
+// localValueSpec emits to fn code to define all of the vars in the
+// function-local ValueSpec, spec.
+//
+func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
+ switch {
+ case len(spec.Values) == len(spec.Names):
+ // e.g. var x, y = 0, 1
+ // 1:1 assignment
+ for i, id := range spec.Names {
+ if !isBlankIdent(id) {
+ fn.addLocalForIdent(id)
+ }
+ lval := b.addr(fn, id, false) // non-escaping
+ b.assign(fn, lval, spec.Values[i], true, nil)
+ }
+
+ case len(spec.Values) == 0:
+ // e.g. var x, y int
+ // Locals are implicitly zero-initialized.
+ for _, id := range spec.Names {
+ if !isBlankIdent(id) {
+ lhs := fn.addLocalForIdent(id)
+ if fn.debugInfo() {
+ emitDebugRef(fn, id, lhs, true)
+ }
+ }
+ }
+
+ default:
+ // e.g. var x, y = pos()
+ tuple := b.exprN(fn, spec.Values[0])
+ for i, id := range spec.Names {
+ if !isBlankIdent(id) {
+ fn.addLocalForIdent(id)
+ lhs := b.addr(fn, id, false) // non-escaping
+ lhs.store(fn, emitExtract(fn, tuple, i))
+ }
+ }
+ }
+}
+
+// assignStmt emits code to fn for a parallel assignment of rhss to lhss.
+// isDef is true if this is a short variable declaration (:=).
+//
+// Note the similarity with localValueSpec.
+//
+func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) {
+ // Side effects of all LHSs and RHSs must occur in left-to-right order.
+ lvals := make([]lvalue, len(lhss))
+ isZero := make([]bool, len(lhss))
+ for i, lhs := range lhss {
+ var lval lvalue = blank{}
+ if !isBlankIdent(lhs) {
+ if isDef {
+ if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil {
+ fn.addNamedLocal(obj)
+ isZero[i] = true
+ }
+ }
+ lval = b.addr(fn, lhs, false) // non-escaping
+ }
+ lvals[i] = lval
+ }
+ if len(lhss) == len(rhss) {
+ // Simple assignment: x = f() (!isDef)
+ // Parallel assignment: x, y = f(), g() (!isDef)
+ // or short var decl: x, y := f(), g() (isDef)
+ //
+ // In all cases, the RHSs may refer to the LHSs,
+ // so we need a storebuf.
+ var sb storebuf
+ for i := range rhss {
+ b.assign(fn, lvals[i], rhss[i], isZero[i], &sb)
+ }
+ sb.emit(fn)
+ } else {
+ // e.g. x, y = pos()
+ tuple := b.exprN(fn, rhss[0])
+ emitDebugRef(fn, rhss[0], tuple, false)
+ for i, lval := range lvals {
+ lval.store(fn, emitExtract(fn, tuple, i))
+ }
+ }
+}
+
+// arrayLen returns the length of the array whose composite literal elements are elts.
+func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
+ var max int64 = -1
+ var i int64 = -1
+ for _, e := range elts {
+ if kv, ok := e.(*ast.KeyValueExpr); ok {
+ i = b.expr(fn, kv.Key).(*Const).Int64()
+ } else {
+ i++
+ }
+ if i > max {
+ max = i
+ }
+ }
+ return max + 1
+}
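+
+// For example, for the elements of [...]int{0: 1, 5: 2, 3}, arrayLen
+// returns 7: the keyed element sets the index to 5, the positional
+// element that follows occupies index 6, and 6 is the maximum index.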
+
+// compLit emits to fn code to initialize a composite literal e at
+// address addr with type typ.
+//
+// Nested composite literals are recursively initialized in place
+// where possible. If isZero is true, compLit assumes that addr
+// holds the zero value for typ.
+//
+// Because the elements of a composite literal may refer to the
+// variables being updated, as in the second line below,
+// x := T{a: 1}
+// x = T{a: x.a}
+// all the reads must occur before all the writes. Thus all stores to
+// loc are emitted to the storebuf sb for later execution.
+//
+// A CompositeLit may have pointer type only in the recursive (nested)
+// case when the type name is implicit. e.g. in []*T{{}}, the inner
+// literal has type *T and behaves like &T{}.
+// In that case, addr must hold a T, not a *T.
+//
+func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
+ typ := deref(fn.Pkg.typeOf(e))
+ switch t := typ.Underlying().(type) {
+ case *types.Struct:
+ if !isZero && len(e.Elts) != t.NumFields() {
+ // memclear
+ sb.store(&address{addr, e.Lbrace, nil},
+ zeroValue(fn, deref(addr.Type())))
+ isZero = true
+ }
+ for i, e := range e.Elts {
+ fieldIndex := i
+ pos := e.Pos()
+ if kv, ok := e.(*ast.KeyValueExpr); ok {
+ fname := kv.Key.(*ast.Ident).Name
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ sf := t.Field(i)
+ if sf.Name() == fname {
+ fieldIndex = i
+ pos = kv.Colon
+ e = kv.Value
+ break
+ }
+ }
+ }
+ sf := t.Field(fieldIndex)
+ faddr := &FieldAddr{
+ X: addr,
+ Field: fieldIndex,
+ }
+ faddr.setType(types.NewPointer(sf.Type()))
+ fn.emit(faddr)
+ b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb)
+ }
+
+ case *types.Array, *types.Slice:
+ var at *types.Array
+ var array Value
+ switch t := t.(type) {
+ case *types.Slice:
+ at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
+ alloc := emitNew(fn, at, e.Lbrace)
+ alloc.Comment = "slicelit"
+ array = alloc
+ case *types.Array:
+ at = t
+ array = addr
+
+ if !isZero && int64(len(e.Elts)) != at.Len() {
+ // memclear
+ sb.store(&address{array, e.Lbrace, nil},
+ zeroValue(fn, deref(array.Type())))
+ }
+ }
+
+ var idx *Const
+ for _, e := range e.Elts {
+ pos := e.Pos()
+ if kv, ok := e.(*ast.KeyValueExpr); ok {
+ idx = b.expr(fn, kv.Key).(*Const)
+ pos = kv.Colon
+ e = kv.Value
+ } else {
+ var idxval int64
+ if idx != nil {
+ idxval = idx.Int64() + 1
+ }
+ idx = intConst(idxval)
+ }
+ iaddr := &IndexAddr{
+ X: array,
+ Index: idx,
+ }
+ iaddr.setType(types.NewPointer(at.Elem()))
+ fn.emit(iaddr)
+ if t != at { // slice
+ // backing array is unaliased => storebuf not needed.
+ b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil)
+ } else {
+ b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb)
+ }
+ }
+
+ if t != at { // slice
+ s := &Slice{X: array}
+ s.setPos(e.Lbrace)
+ s.setType(typ)
+ sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s))
+ }
+
+ case *types.Map:
+ m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))}
+ m.setPos(e.Lbrace)
+ m.setType(typ)
+ fn.emit(m)
+ for _, e := range e.Elts {
+ e := e.(*ast.KeyValueExpr)
+
+ // If a key expression in a map literal is itself a
+ // composite literal, the type may be omitted.
+ // For example:
+ // map[*struct{}]bool{{}: true}
+ // An &-operation may be implied:
+ // map[*struct{}]bool{&struct{}{}: true}
+ var key Value
+ if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) {
+ // A CompositeLit never evaluates to a pointer,
+ // so if the type of the location is a pointer,
+ // an &-operation is implied.
+ key = b.addr(fn, e.Key, true).address(fn)
+ } else {
+ key = b.expr(fn, e.Key)
+ }
+
+ loc := element{
+ m: m,
+ k: emitConv(fn, key, t.Key()),
+ t: t.Elem(),
+ pos: e.Colon,
+ }
+
+ // We call assign() only because it takes care
+ // of any &-operation required in the recursive
+ // case, e.g.,
+ // map[int]*struct{}{0: {}} implies &struct{}{}.
+ // In-place update is of course impossible,
+ // and no storebuf is needed.
+ b.assign(fn, &loc, e.Value, true, nil)
+ }
+ sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m)
+
+ default:
+ panic("unexpected CompositeLit type: " + t.String())
+ }
+}
+
+// switchStmt emits to fn code for the switch statement s, optionally
+// labelled by label.
+//
+func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
+ // We treat SwitchStmt like a sequential if-else chain.
+ // Multiway dispatch can be recovered later by ssautil.Switches()
+ // to those cases that are free of side effects.
+ if s.Init != nil {
+ b.stmt(fn, s.Init)
+ }
+ var tag Value = vTrue
+ if s.Tag != nil {
+ tag = b.expr(fn, s.Tag)
+ }
+ done := fn.newBasicBlock("switch.done")
+ if label != nil {
+ label._break = done
+ }
+ // We pull the default case (if present) down to the end.
+ // But each fallthrough label must point to the next
+ // body block in source order, so we preallocate a
+ // body block (fallthru) for the next case.
+ // Unfortunately this makes for a confusing block order.
+ var dfltBody *[]ast.Stmt
+ var dfltFallthrough *BasicBlock
+ var fallthru, dfltBlock *BasicBlock
+ ncases := len(s.Body.List)
+ for i, clause := range s.Body.List {
+ body := fallthru
+ if body == nil {
+ body = fn.newBasicBlock("switch.body") // first case only
+ }
+
+ // Preallocate body block for the next case.
+ fallthru = done
+ if i+1 < ncases {
+ fallthru = fn.newBasicBlock("switch.body")
+ }
+
+ cc := clause.(*ast.CaseClause)
+ if cc.List == nil {
+ // Default case.
+ dfltBody = &cc.Body
+ dfltFallthrough = fallthru
+ dfltBlock = body
+ continue
+ }
+
+ var nextCond *BasicBlock
+ for _, cond := range cc.List {
+ nextCond = fn.newBasicBlock("switch.next")
+ // TODO(adonovan): opt: when tag==vTrue, we'd
+ // get better code if we use b.cond(cond)
+ // instead of BinOp(EQL, tag, b.expr(cond))
+ // followed by If. Don't forget conversions
+ // though.
+ cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos)
+ emitIf(fn, cond, body, nextCond)
+ fn.currentBlock = nextCond
+ }
+ fn.currentBlock = body
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ _fallthrough: fallthru,
+ }
+ b.stmtList(fn, cc.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, done)
+ fn.currentBlock = nextCond
+ }
+ if dfltBlock != nil {
+ emitJump(fn, dfltBlock)
+ fn.currentBlock = dfltBlock
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ _fallthrough: dfltFallthrough,
+ }
+ b.stmtList(fn, *dfltBody)
+ fn.targets = fn.targets.tail
+ }
+ emitJump(fn, done)
+ fn.currentBlock = done
+}
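+
+// For example,
+//
+//	switch x {
+//	case 1, 2: S1
+//	default:   SD
+//	case 3:    S3
+//	}
+//
+// becomes, in effect, a chain of comparisons: if x==1 or x==2 jump to the
+// S1 body, else if x==3 jump to the S3 body, else jump to the SD body;
+// each body then jumps to switch.done. The default body is emitted last
+// even though it appears in the middle of the source.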
+
+// typeSwitchStmt emits to fn code for the type switch statement s, optionally
+// labelled by label.
+//
+func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
+ // We treat TypeSwitchStmt like a sequential if-else chain.
+ // Multiway dispatch can be recovered later by ssautil.Switches().
+
+ // Typeswitch lowering:
+ //
+ // var x X
+ // switch y := x.(type) {
+ // case T1, T2: S1 // >1 (y := x)
+ // case nil: SN // nil (y := x)
+ // default: SD // 0 types (y := x)
+ // case T3: S3 // 1 type (y := x.(T3))
+ // }
+ //
+ // ...s.Init...
+ // x := eval x
+ // .caseT1:
+ // t1, ok1 := typeswitch,ok x <T1>
+ // if ok1 then goto S1 else goto .caseT2
+ // .caseT2:
+ // t2, ok2 := typeswitch,ok x <T2>
+ // if ok2 then goto S1 else goto .caseNil
+ // .S1:
+ // y := x
+ // ...S1...
+ // goto done
+ // .caseNil:
+ // if x == nil then goto SN else goto .caseT3
+ // .SN:
+ // y := x
+ // ...SN...
+ // goto done
+ // .caseT3:
+ // t3, ok3 := typeswitch,ok x <T3>
+ // if ok3 then goto S3 else goto default
+ // .S3:
+ // y := t3
+ // ...S3...
+ // goto done
+ // .default:
+ // y := x
+ // ...SD...
+ // goto done
+ // .done:
+
+ if s.Init != nil {
+ b.stmt(fn, s.Init)
+ }
+
+ var x Value
+ switch ass := s.Assign.(type) {
+ case *ast.ExprStmt: // x.(type)
+ x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X)
+ case *ast.AssignStmt: // y := x.(type)
+ x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X)
+ }
+
+ done := fn.newBasicBlock("typeswitch.done")
+ if label != nil {
+ label._break = done
+ }
+ var default_ *ast.CaseClause
+ for _, clause := range s.Body.List {
+ cc := clause.(*ast.CaseClause)
+ if cc.List == nil {
+ default_ = cc
+ continue
+ }
+ body := fn.newBasicBlock("typeswitch.body")
+ var next *BasicBlock
+ var casetype types.Type
+ var ti Value // ti, ok := typeassert,ok x <Ti>
+ for _, cond := range cc.List {
+ next = fn.newBasicBlock("typeswitch.next")
+ casetype = fn.Pkg.typeOf(cond)
+ var condv Value
+ if casetype == tUntypedNil {
+ condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos)
+ ti = x
+ } else {
+ yok := emitTypeTest(fn, x, casetype, cc.Case)
+ ti = emitExtract(fn, yok, 0)
+ condv = emitExtract(fn, yok, 1)
+ }
+ emitIf(fn, condv, body, next)
+ fn.currentBlock = next
+ }
+ if len(cc.List) != 1 {
+ ti = x
+ }
+ fn.currentBlock = body
+ b.typeCaseBody(fn, cc, ti, done)
+ fn.currentBlock = next
+ }
+ if default_ != nil {
+ b.typeCaseBody(fn, default_, x, done)
+ } else {
+ emitJump(fn, done)
+ }
+ fn.currentBlock = done
+}
+
+func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) {
+ if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
+ // In a switch y := x.(type), each case clause
+ // implicitly declares a distinct object y.
+ // In a single-type case, y has that type.
+ // In multi-type cases, 'case nil' and default,
+ // y has the same type as the interface operand.
+ emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos())
+ }
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ b.stmtList(fn, cc.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, done)
+}
+
+// selectStmt emits to fn code for the select statement s, optionally
+// labelled by label.
+//
+func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
+ // A blocking select of a single case degenerates to a
+ // simple send or receive.
+ // TODO(adonovan): opt: is this optimization worth its weight?
+ if len(s.Body.List) == 1 {
+ clause := s.Body.List[0].(*ast.CommClause)
+ if clause.Comm != nil {
+ b.stmt(fn, clause.Comm)
+ done := fn.newBasicBlock("select.done")
+ if label != nil {
+ label._break = done
+ }
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ b.stmtList(fn, clause.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, done)
+ fn.currentBlock = done
+ return
+ }
+ }
+
+ // First evaluate all channels in all cases, and find
+ // the directions of each state.
+ var states []*SelectState
+ blocking := true
+ debugInfo := fn.debugInfo()
+ for _, clause := range s.Body.List {
+ var st *SelectState
+ switch comm := clause.(*ast.CommClause).Comm.(type) {
+ case nil: // default case
+ blocking = false
+ continue
+
+ case *ast.SendStmt: // ch<- i
+ ch := b.expr(fn, comm.Chan)
+ st = &SelectState{
+ Dir: types.SendOnly,
+ Chan: ch,
+ Send: emitConv(fn, b.expr(fn, comm.Value),
+ ch.Type().Underlying().(*types.Chan).Elem()),
+ Pos: comm.Arrow,
+ }
+ if debugInfo {
+ st.DebugNode = comm
+ }
+
+ case *ast.AssignStmt: // x := <-ch
+ recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr)
+ st = &SelectState{
+ Dir: types.RecvOnly,
+ Chan: b.expr(fn, recv.X),
+ Pos: recv.OpPos,
+ }
+ if debugInfo {
+ st.DebugNode = recv
+ }
+
+ case *ast.ExprStmt: // <-ch
+ recv := unparen(comm.X).(*ast.UnaryExpr)
+ st = &SelectState{
+ Dir: types.RecvOnly,
+ Chan: b.expr(fn, recv.X),
+ Pos: recv.OpPos,
+ }
+ if debugInfo {
+ st.DebugNode = recv
+ }
+ }
+ states = append(states, st)
+ }
+
+ // We dispatch on the (fair) result of Select using a
+ // sequential if-else chain, in effect:
+ //
+ // idx, recvOk, r0...r_n-1 := select(...)
+ // if idx == 0 { // receive on channel 0 (first receive => r0)
+ // x, ok := r0, recvOk
+ // ...state0...
+ // } else if idx == 1 { // send on channel 1
+ // ...state1...
+ // } else {
+ // ...default...
+ // }
+ sel := &Select{
+ States: states,
+ Blocking: blocking,
+ }
+ sel.setPos(s.Select)
+ var vars []*types.Var
+ vars = append(vars, varIndex, varOk)
+ for _, st := range states {
+ if st.Dir == types.RecvOnly {
+ tElem := st.Chan.Type().Underlying().(*types.Chan).Elem()
+ vars = append(vars, anonVar(tElem))
+ }
+ }
+ sel.setType(types.NewTuple(vars...))
+
+ fn.emit(sel)
+ idx := emitExtract(fn, sel, 0)
+
+ done := fn.newBasicBlock("select.done")
+ if label != nil {
+ label._break = done
+ }
+
+ var defaultBody *[]ast.Stmt
+ state := 0
+ r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
+ for _, cc := range s.Body.List {
+ clause := cc.(*ast.CommClause)
+ if clause.Comm == nil {
+ defaultBody = &clause.Body
+ continue
+ }
+ body := fn.newBasicBlock("select.body")
+ next := fn.newBasicBlock("select.next")
+ emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next)
+ fn.currentBlock = body
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ switch comm := clause.Comm.(type) {
+ case *ast.ExprStmt: // <-ch
+ if debugInfo {
+ v := emitExtract(fn, sel, r)
+ emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
+ }
+ r++
+
+ case *ast.AssignStmt: // x := <-states[state].Chan
+ if comm.Tok == token.DEFINE {
+ fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident))
+ }
+ x := b.addr(fn, comm.Lhs[0], false) // non-escaping
+ v := emitExtract(fn, sel, r)
+ if debugInfo {
+ emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
+ }
+ x.store(fn, v)
+
+ if len(comm.Lhs) == 2 { // x, ok := ...
+ if comm.Tok == token.DEFINE {
+ fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident))
+ }
+ ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
+ ok.store(fn, emitExtract(fn, sel, 1))
+ }
+ r++
+ }
+ b.stmtList(fn, clause.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, done)
+ fn.currentBlock = next
+ state++
+ }
+ if defaultBody != nil {
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ b.stmtList(fn, *defaultBody)
+ fn.targets = fn.targets.tail
+ } else {
+ // A blocking select must match some case.
+ // (This should really be a runtime.errorString, not a string.)
+ fn.emit(&Panic{
+ X: emitConv(fn, stringConst("blocking select matched no case"), tEface),
+ })
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+ }
+ emitJump(fn, done)
+ fn.currentBlock = done
+}
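+
+// For example, "select { case ch <- v: S }" has a single blocking case,
+// so the fast path above lowers it to an ordinary Send followed by S;
+// no Select instruction is emitted.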
+
+// forStmt emits to fn code for the for statement s, optionally
+// labelled by label.
+//
+func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
+ // ...init...
+ // jump loop
+ // loop:
+ // if cond goto body else done
+ // body:
+ // ...body...
+ // jump post
+ // post: (target of continue)
+ // ...post...
+ // jump loop
+ // done: (target of break)
+ if s.Init != nil {
+ b.stmt(fn, s.Init)
+ }
+ body := fn.newBasicBlock("for.body")
+ done := fn.newBasicBlock("for.done") // target of 'break'
+ loop := body // target of back-edge
+ if s.Cond != nil {
+ loop = fn.newBasicBlock("for.loop")
+ }
+ cont := loop // target of 'continue'
+ if s.Post != nil {
+ cont = fn.newBasicBlock("for.post")
+ }
+ if label != nil {
+ label._break = done
+ label._continue = cont
+ }
+ emitJump(fn, loop)
+ fn.currentBlock = loop
+ if loop != body {
+ b.cond(fn, s.Cond, body, done)
+ fn.currentBlock = body
+ }
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ _continue: cont,
+ }
+ b.stmt(fn, s.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, cont)
+
+ if s.Post != nil {
+ fn.currentBlock = cont
+ b.stmt(fn, s.Post)
+ emitJump(fn, loop) // back-edge
+ }
+ fn.currentBlock = done
+}
+
+// rangeIndexed emits to fn the header for an integer-indexed loop
+// over array, *array or slice value x.
+// The v result is defined only if tv is non-nil.
+// pos is the position of the "for" token.
+//
+func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
+ //
+ // length = len(x)
+ // index = -1
+ // loop: (target of continue)
+ // index++
+ // if index < length goto body else done
+ // body:
+ // k = index
+ // v = x[index]
+ // ...body...
+ // jump loop
+ // done: (target of break)
+
+ // Determine number of iterations.
+ var length Value
+ if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok {
+ // For array or *array, the number of iterations is
+ // known statically thanks to the type. We avoid a
+ // data dependence upon x, permitting later dead-code
+ // elimination if x is pure, static unrolling, etc.
+ // Ranging over a nil *array may have >0 iterations.
+ // We still generate code for x, in case it has effects.
+ length = intConst(arr.Len())
+ } else {
+ // length = len(x).
+ var c Call
+ c.Call.Value = makeLen(x.Type())
+ c.Call.Args = []Value{x}
+ c.setType(tInt)
+ length = fn.emit(&c)
+ }
+
+ index := fn.addLocal(tInt, token.NoPos)
+ emitStore(fn, index, intConst(-1), pos)
+
+ loop = fn.newBasicBlock("rangeindex.loop")
+ emitJump(fn, loop)
+ fn.currentBlock = loop
+
+ incr := &BinOp{
+ Op: token.ADD,
+ X: emitLoad(fn, index),
+ Y: vOne,
+ }
+ incr.setType(tInt)
+ emitStore(fn, index, fn.emit(incr), pos)
+
+ body := fn.newBasicBlock("rangeindex.body")
+ done = fn.newBasicBlock("rangeindex.done")
+ emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done)
+ fn.currentBlock = body
+
+ k = emitLoad(fn, index)
+ if tv != nil {
+ switch t := x.Type().Underlying().(type) {
+ case *types.Array:
+ instr := &Index{
+ X: x,
+ Index: k,
+ }
+ instr.setType(t.Elem())
+ v = fn.emit(instr)
+
+ case *types.Pointer: // *array
+ instr := &IndexAddr{
+ X: x,
+ Index: k,
+ }
+ instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()))
+ v = emitLoad(fn, fn.emit(instr))
+
+ case *types.Slice:
+ instr := &IndexAddr{
+ X: x,
+ Index: k,
+ }
+ instr.setType(types.NewPointer(t.Elem()))
+ v = emitLoad(fn, fn.emit(instr))
+
+ default:
+ panic("rangeIndexed x:" + t.String())
+ }
+ }
+ return
+}
+
+// rangeIter emits to fn the header for a loop using
+// Range/Next/Extract to iterate over map or string value x.
+// tk and tv are the types of the key/value results k and v, or nil
+// if the respective component is not wanted.
+//
+func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
+ //
+ // it = range x
+ // loop: (target of continue)
+ // okv = next it (ok, key, value)
+ // ok = extract okv #0
+ // if ok goto body else done
+ // body:
+ // k = extract okv #1
+ // v = extract okv #2
+ // ...body...
+ // jump loop
+ // done: (target of break)
+ //
+
+ if tk == nil {
+ tk = tInvalid
+ }
+ if tv == nil {
+ tv = tInvalid
+ }
+
+ rng := &Range{X: x}
+ rng.setPos(pos)
+ rng.setType(tRangeIter)
+ it := fn.emit(rng)
+
+ loop = fn.newBasicBlock("rangeiter.loop")
+ emitJump(fn, loop)
+ fn.currentBlock = loop
+
+ _, isString := x.Type().Underlying().(*types.Basic)
+
+ okv := &Next{
+ Iter: it,
+ IsString: isString,
+ }
+ okv.setType(types.NewTuple(
+ varOk,
+ newVar("k", tk),
+ newVar("v", tv),
+ ))
+ fn.emit(okv)
+
+ body := fn.newBasicBlock("rangeiter.body")
+ done = fn.newBasicBlock("rangeiter.done")
+ emitIf(fn, emitExtract(fn, okv, 0), body, done)
+ fn.currentBlock = body
+
+ if tk != tInvalid {
+ k = emitExtract(fn, okv, 1)
+ }
+ if tv != tInvalid {
+ v = emitExtract(fn, okv, 2)
+ }
+ return
+}
+
+// rangeChan emits to fn the header for a loop that receives from
+// channel x until it fails.
+// tk is the channel's element type, or nil if the k result is
+// not wanted
+// pos is the position of the '=' or ':=' token.
+//
+func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
+ //
+ // loop: (target of continue)
+ // ko = <-x (key, ok)
+ // ok = extract ko #1
+ // if ok goto body else done
+ // body:
+ // k = extract ko #0
+ // ...
+ // goto loop
+ // done: (target of break)
+
+ loop = fn.newBasicBlock("rangechan.loop")
+ emitJump(fn, loop)
+ fn.currentBlock = loop
+ recv := &UnOp{
+ Op: token.ARROW,
+ X: x,
+ CommaOk: true,
+ }
+ recv.setPos(pos)
+ recv.setType(types.NewTuple(
+ newVar("k", x.Type().Underlying().(*types.Chan).Elem()),
+ varOk,
+ ))
+ ko := fn.emit(recv)
+ body := fn.newBasicBlock("rangechan.body")
+ done = fn.newBasicBlock("rangechan.done")
+ emitIf(fn, emitExtract(fn, ko, 1), body, done)
+ fn.currentBlock = body
+ if tk != nil {
+ k = emitExtract(fn, ko, 0)
+ }
+ return
+}
+
+// rangeStmt emits to fn code for the range statement s, optionally
+// labelled by label.
+//
+func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
+ var tk, tv types.Type
+ if s.Key != nil && !isBlankIdent(s.Key) {
+ tk = fn.Pkg.typeOf(s.Key)
+ }
+ if s.Value != nil && !isBlankIdent(s.Value) {
+ tv = fn.Pkg.typeOf(s.Value)
+ }
+
+ // If iteration variables are defined (:=), this
+ // occurs once outside the loop.
+ //
+ // Unlike a short variable declaration, a RangeStmt
+ // using := never redeclares an existing variable; it
+ // always creates a new one.
+ if s.Tok == token.DEFINE {
+ if tk != nil {
+ fn.addLocalForIdent(s.Key.(*ast.Ident))
+ }
+ if tv != nil {
+ fn.addLocalForIdent(s.Value.(*ast.Ident))
+ }
+ }
+
+ x := b.expr(fn, s.X)
+
+ var k, v Value
+ var loop, done *BasicBlock
+ switch rt := x.Type().Underlying().(type) {
+ case *types.Slice, *types.Array, *types.Pointer: // *array
+ k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For)
+
+ case *types.Chan:
+ k, loop, done = b.rangeChan(fn, x, tk, s.For)
+
+ case *types.Map, *types.Basic: // string
+ k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For)
+
+ default:
+ panic("Cannot range over: " + rt.String())
+ }
+
+ // Evaluate both LHS expressions before we update either.
+ var kl, vl lvalue
+ if tk != nil {
+ kl = b.addr(fn, s.Key, false) // non-escaping
+ }
+ if tv != nil {
+ vl = b.addr(fn, s.Value, false) // non-escaping
+ }
+ if tk != nil {
+ kl.store(fn, k)
+ }
+ if tv != nil {
+ vl.store(fn, v)
+ }
+
+ if label != nil {
+ label._break = done
+ label._continue = loop
+ }
+
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ _continue: loop,
+ }
+ b.stmt(fn, s.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, loop) // back-edge
+ fn.currentBlock = done
+}
+
+// stmt lowers statement s to SSA form, emitting code to fn.
+func (b *builder) stmt(fn *Function, _s ast.Stmt) {
+ // The label of the current statement. If non-nil, its _goto
+ // target is always set; its _break and _continue are set only
+ // within the body of switch/typeswitch/select/for/range.
+ // It is effectively an additional default-nil parameter of stmt().
+ var label *lblock
+start:
+ switch s := _s.(type) {
+ case *ast.EmptyStmt:
+ // ignore. (Usually removed by gofmt.)
+
+ case *ast.DeclStmt: // Con, Var or Typ
+ d := s.Decl.(*ast.GenDecl)
+ if d.Tok == token.VAR {
+ for _, spec := range d.Specs {
+ if vs, ok := spec.(*ast.ValueSpec); ok {
+ b.localValueSpec(fn, vs)
+ }
+ }
+ }
+
+ case *ast.LabeledStmt:
+ label = fn.labelledBlock(s.Label)
+ emitJump(fn, label._goto)
+ fn.currentBlock = label._goto
+ _s = s.Stmt
+ goto start // effectively: tailcall stmt(fn, s.Stmt, label)
+
+ case *ast.ExprStmt:
+ b.expr(fn, s.X)
+
+ case *ast.SendStmt:
+ fn.emit(&Send{
+ Chan: b.expr(fn, s.Chan),
+ X: emitConv(fn, b.expr(fn, s.Value),
+ fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()),
+ pos: s.Arrow,
+ })
+
+ case *ast.IncDecStmt:
+ op := token.ADD
+ if s.Tok == token.DEC {
+ op = token.SUB
+ }
+ loc := b.addr(fn, s.X, false)
+ b.assignOp(fn, loc, NewConst(exact.MakeInt64(1), loc.typ()), op)
+
+ case *ast.AssignStmt:
+ switch s.Tok {
+ case token.ASSIGN, token.DEFINE:
+ b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE)
+
+ default: // +=, etc.
+ op := s.Tok + token.ADD - token.ADD_ASSIGN
+ b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op)
+ }
+
+ case *ast.GoStmt:
+ // The "intrinsics" new/make/len/cap are forbidden here.
+ // panic is treated like an ordinary function call.
+ v := Go{pos: s.Go}
+ b.setCall(fn, s.Call, &v.Call)
+ fn.emit(&v)
+
+ case *ast.DeferStmt:
+ // The "intrinsics" new/make/len/cap are forbidden here.
+ // panic is treated like an ordinary function call.
+ v := Defer{pos: s.Defer}
+ b.setCall(fn, s.Call, &v.Call)
+ fn.emit(&v)
+
+ // A deferred call can cause recovery from panic,
+ // and control resumes at the Recover block.
+ createRecoverBlock(fn)
+
+ case *ast.ReturnStmt:
+ var results []Value
+ if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
+ // Return of one expression in a multi-valued function.
+ tuple := b.exprN(fn, s.Results[0])
+ ttuple := tuple.Type().(*types.Tuple)
+ for i, n := 0, ttuple.Len(); i < n; i++ {
+ results = append(results,
+ emitConv(fn, emitExtract(fn, tuple, i),
+ fn.Signature.Results().At(i).Type()))
+ }
+ } else {
+ // 1:1 return, or no-arg return in non-void function.
+ for i, r := range s.Results {
+ v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type())
+ results = append(results, v)
+ }
+ }
+ if fn.namedResults != nil {
+ // Function has named result parameters (NRPs).
+ // Perform parallel assignment of return operands to NRPs.
+ for i, r := range results {
+ emitStore(fn, fn.namedResults[i], r, s.Return)
+ }
+ }
+ // Run function calls deferred in this
+ // function when explicitly returning from it.
+ fn.emit(new(RunDefers))
+ if fn.namedResults != nil {
+ // Reload NRPs to form the result tuple.
+ results = results[:0]
+ for _, r := range fn.namedResults {
+ results = append(results, emitLoad(fn, r))
+ }
+ }
+ fn.emit(&Return{Results: results, pos: s.Return})
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+
+ case *ast.BranchStmt:
+ var block *BasicBlock
+ switch s.Tok {
+ case token.BREAK:
+ if s.Label != nil {
+ block = fn.labelledBlock(s.Label)._break
+ } else {
+ for t := fn.targets; t != nil && block == nil; t = t.tail {
+ block = t._break
+ }
+ }
+
+ case token.CONTINUE:
+ if s.Label != nil {
+ block = fn.labelledBlock(s.Label)._continue
+ } else {
+ for t := fn.targets; t != nil && block == nil; t = t.tail {
+ block = t._continue
+ }
+ }
+
+ case token.FALLTHROUGH:
+ for t := fn.targets; t != nil && block == nil; t = t.tail {
+ block = t._fallthrough
+ }
+
+ case token.GOTO:
+ block = fn.labelledBlock(s.Label)._goto
+ }
+ emitJump(fn, block)
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+
+ case *ast.BlockStmt:
+ b.stmtList(fn, s.List)
+
+ case *ast.IfStmt:
+ if s.Init != nil {
+ b.stmt(fn, s.Init)
+ }
+ then := fn.newBasicBlock("if.then")
+ done := fn.newBasicBlock("if.done")
+ els := done
+ if s.Else != nil {
+ els = fn.newBasicBlock("if.else")
+ }
+ b.cond(fn, s.Cond, then, els)
+ fn.currentBlock = then
+ b.stmt(fn, s.Body)
+ emitJump(fn, done)
+
+ if s.Else != nil {
+ fn.currentBlock = els
+ b.stmt(fn, s.Else)
+ emitJump(fn, done)
+ }
+
+ fn.currentBlock = done
+
+ case *ast.SwitchStmt:
+ b.switchStmt(fn, s, label)
+
+ case *ast.TypeSwitchStmt:
+ b.typeSwitchStmt(fn, s, label)
+
+ case *ast.SelectStmt:
+ b.selectStmt(fn, s, label)
+
+ case *ast.ForStmt:
+ b.forStmt(fn, s, label)
+
+ case *ast.RangeStmt:
+ b.rangeStmt(fn, s, label)
+
+ default:
+ panic(fmt.Sprintf("unexpected statement kind: %T", s))
+ }
+}
+
+// buildFunction builds SSA code for the body of function fn. Idempotent.
+func (b *builder) buildFunction(fn *Function) {
+ if fn.Blocks != nil {
+ return // building already started
+ }
+
+ var recvField *ast.FieldList
+ var body *ast.BlockStmt
+ var functype *ast.FuncType
+ switch n := fn.syntax.(type) {
+ case nil:
+ return // not a Go source function. (Synthetic, or from object file.)
+ case *ast.FuncDecl:
+ functype = n.Type
+ recvField = n.Recv
+ body = n.Body
+ case *ast.FuncLit:
+ functype = n.Type
+ body = n.Body
+ default:
+ panic(n)
+ }
+
+ if body == nil {
+ // External function.
+ if fn.Params == nil {
+ // This condition ensures we add a non-empty
+ // params list once only, but we may attempt
+ // the degenerate empty case repeatedly.
+ // TODO(adonovan): opt: don't do that.
+
+ // We set Function.Params even though there is no body
+ // code to reference them. This simplifies clients.
+ if recv := fn.Signature.Recv(); recv != nil {
+ fn.addParamObj(recv)
+ }
+ params := fn.Signature.Params()
+ for i, n := 0, params.Len(); i < n; i++ {
+ fn.addParamObj(params.At(i))
+ }
+ }
+ return
+ }
+ if fn.Prog.mode&LogSource != 0 {
+ defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
+ }
+ fn.startBody()
+ fn.createSyntacticParams(recvField, functype)
+ b.stmt(fn, body)
+ if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
+ // Control fell off the end of the function's body block.
+ //
+ // Block optimizations eliminate the current block, if
+ // unreachable. It is a builder invariant that
+ // if this no-arg return is ill-typed for
+ // fn.Signature.Results, this block must be
+ // unreachable. The sanity checker checks this.
+ fn.emit(new(RunDefers))
+ fn.emit(new(Return))
+ }
+ fn.finishBody()
+}
+
+// buildFuncDecl builds SSA code for the function or method declared
+// by decl in package pkg.
+//
+func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
+ id := decl.Name
+ if isBlankIdent(id) {
+ return // discard
+ }
+ fn := pkg.values[pkg.info.Defs[id]].(*Function)
+ if decl.Recv == nil && id.Name == "init" {
+ var v Call
+ v.Call.Value = fn
+ v.setType(types.NewTuple())
+ pkg.init.emit(&v)
+ }
+ b.buildFunction(fn)
+}
+
+// Build calls Package.Build for each package in prog.
+// Building occurs in parallel unless the BuildSerially mode flag was set.
+//
+// Build is intended for whole-program analysis; a typical compiler
+// need only build a single package.
+//
+// Build is idempotent and thread-safe.
+//
+func (prog *Program) Build() {
+ var wg sync.WaitGroup
+ for _, p := range prog.packages {
+ if prog.mode&BuildSerially != 0 {
+ p.Build()
+ } else {
+ wg.Add(1)
+ go func(p *Package) {
+ p.Build()
+ wg.Done()
+ }(p)
+ }
+ }
+ wg.Wait()
+}
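+
+// A minimal usage sketch (assuming lprog is a loader.Program obtained
+// from loader.Config.Load); building serially together with LogSource
+// makes the builder's log easier to follow:
+//
+//	prog := ssautil.CreateProgram(lprog, ssa.BuildSerially|ssa.LogSource)
+//	prog.Build()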
+
+// Build builds SSA code for all functions and vars in package p.
+//
+// Precondition: CreatePackage must have been called for all of p's
+// direct imports (and hence its direct imports must have been
+// error-free).
+//
+// Build is idempotent and thread-safe.
+//
+func (p *Package) Build() { p.buildOnce.Do(p.build) }
+
+func (p *Package) build() {
+ if p.info == nil {
+ return // synthetic package, e.g. "testmain"
+ }
+
+ // Ensure we have runtime type info for all exported members.
+ // TODO(adonovan): ideally belongs in memberFromObject, but
+ // that would require package creation in topological order.
+ for name, mem := range p.Members {
+ if ast.IsExported(name) {
+ p.Prog.needMethodsOf(mem.Type())
+ }
+ }
+ if p.Prog.mode&LogSource != 0 {
+ defer logStack("build %s", p)()
+ }
+ init := p.init
+ init.startBody()
+
+ var done *BasicBlock
+
+ if p.Prog.mode&BareInits == 0 {
+ // Make init() skip if package is already initialized.
+ initguard := p.Var("init$guard")
+ doinit := init.newBasicBlock("init.start")
+ done = init.newBasicBlock("init.done")
+ emitIf(init, emitLoad(init, initguard), done, doinit)
+ init.currentBlock = doinit
+ emitStore(init, initguard, vTrue, token.NoPos)
+
+ // Call the init() function of each package we import.
+ for _, pkg := range p.Pkg.Imports() {
+ prereq := p.Prog.packages[pkg]
+ if prereq == nil {
+ panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
+ }
+ var v Call
+ v.Call.Value = prereq.init
+ v.Call.pos = init.pos
+ v.setType(types.NewTuple())
+ init.emit(&v)
+ }
+ }
+
+ var b builder
+
+ // Initialize package-level vars in correct order.
+ for _, varinit := range p.info.InitOrder {
+ if init.Prog.mode&LogSource != 0 {
+ fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
+ varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
+ }
+ if len(varinit.Lhs) == 1 {
+ // 1:1 initialization: var x, y = a(), b()
+ var lval lvalue
+ if v := varinit.Lhs[0]; v.Name() != "_" {
+ lval = &address{addr: p.values[v].(*Global), pos: v.Pos()}
+ } else {
+ lval = blank{}
+ }
+ b.assign(init, lval, varinit.Rhs, true, nil)
+ } else {
+ // n:1 initialization: var x, y = f()
+ tuple := b.exprN(init, varinit.Rhs)
+ for i, v := range varinit.Lhs {
+ if v.Name() == "_" {
+ continue
+ }
+ emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos())
+ }
+ }
+ }
+
+ // Build all package-level functions, init functions
+ // and methods, including unreachable/blank ones.
+ // We build them in source order, but it's not significant.
+ for _, file := range p.files {
+ for _, decl := range file.Decls {
+ if decl, ok := decl.(*ast.FuncDecl); ok {
+ b.buildFuncDecl(p, decl)
+ }
+ }
+ }
+
+ // Finish up init().
+ if p.Prog.mode&BareInits == 0 {
+ emitJump(init, done)
+ init.currentBlock = done
+ }
+ init.emit(new(Return))
+ init.finishBody()
+
+ p.info = nil // We no longer need ASTs or go/types deductions.
+
+ if p.Prog.mode&SanityCheckFunctions != 0 {
+ sanityCheckPackage(p)
+ }
+}
+
+// Like ObjectOf, but panics instead of returning nil.
+// Only valid during p's create and build phases.
+func (p *Package) objectOf(id *ast.Ident) types.Object {
+ if o := p.info.ObjectOf(id); o != nil {
+ return o
+ }
+ panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
+ id.Name, p.Prog.Fset.Position(id.Pos())))
+}
+
+// Like TypeOf, but panics instead of returning nil.
+// Only valid during p's create and build phases.
+func (p *Package) typeOf(e ast.Expr) types.Type {
+ if T := p.info.TypeOf(e); T != nil {
+ return T
+ }
+ panic(fmt.Sprintf("no type for %T @ %s",
+ e, p.Prog.Fset.Position(e.Pos())))
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/builder_test.go b/vendor/golang.org/x/tools/go/ssa/builder_test.go
new file mode 100644
index 0000000..c45f930
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/builder_test.go
@@ -0,0 +1,500 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa_test
+
+import (
+ "bytes"
+ "go/ast"
+ "go/importer"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "os"
+ "reflect"
+ "sort"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+func isEmpty(f *ssa.Function) bool { return f.Blocks == nil }
+
+// Tests that programs partially loaded from gc object files contain
+// functions with no code for the external portions, but are otherwise ok.
+func TestBuildPackage(t *testing.T) {
+ input := `
+package main
+
+import (
+ "bytes"
+ "io"
+ "testing"
+)
+
+func main() {
+ var t testing.T
+ t.Parallel() // static call to external declared method
+ t.Fail() // static call to promoted external declared method
+ testing.Short() // static call to external package-level function
+
+ var w io.Writer = new(bytes.Buffer)
+ w.Write(nil) // interface invoke of external declared method
+}
+`
+
+ // Parse the file.
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "input.go", input, 0)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ // Build an SSA program from the parsed file.
+ // Load its dependencies from gc binary export data.
+ mainPkg, _, err := ssautil.BuildPackage(&types.Config{Importer: importer.Default()}, fset,
+ types.NewPackage("main", ""), []*ast.File{f}, ssa.SanityCheckFunctions)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ // The main package, its direct and indirect dependencies are loaded.
+ deps := []string{
+ // directly imported dependencies:
+ "bytes", "io", "testing",
+ // indirect dependencies mentioned by
+ // the direct imports' export data
+ "sync", "unicode", "time",
+ }
+
+ prog := mainPkg.Prog
+ all := prog.AllPackages()
+ if len(all) <= len(deps) {
+ t.Errorf("unexpected set of loaded packages: %q", all)
+ }
+ for _, path := range deps {
+ pkg := prog.ImportedPackage(path)
+ if pkg == nil {
+ t.Errorf("package not loaded: %q", path)
+ continue
+ }
+
+ // External packages should have no function bodies (except for wrappers).
+ isExt := pkg != mainPkg
+
+ // init()
+ if isExt && !isEmpty(pkg.Func("init")) {
+ t.Errorf("external package %s has non-empty init", pkg)
+ } else if !isExt && isEmpty(pkg.Func("init")) {
+ t.Errorf("main package %s has empty init", pkg)
+ }
+
+ for _, mem := range pkg.Members {
+ switch mem := mem.(type) {
+ case *ssa.Function:
+ // Functions at package level.
+ if isExt && !isEmpty(mem) {
+ t.Errorf("external function %s is non-empty", mem)
+ } else if !isExt && isEmpty(mem) {
+ t.Errorf("function %s is empty", mem)
+ }
+
+ case *ssa.Type:
+ // Methods of named types T.
+ // (In this test, all exported methods belong to *T not T.)
+ if !isExt {
+ t.Fatalf("unexpected named type in main package: %s", mem)
+ }
+ mset := prog.MethodSets.MethodSet(types.NewPointer(mem.Type()))
+ for i, n := 0, mset.Len(); i < n; i++ {
+ m := prog.MethodValue(mset.At(i))
+ // For external types, only synthetic wrappers have code.
+ expExt := !strings.Contains(m.Synthetic, "wrapper")
+ if expExt && !isEmpty(m) {
+ t.Errorf("external method %s is non-empty: %s",
+ m, m.Synthetic)
+ } else if !expExt && isEmpty(m) {
+ t.Errorf("method function %s is empty: %s",
+ m, m.Synthetic)
+ }
+ }
+ }
+ }
+ }
+
+ expectedCallee := []string{
+ "(*testing.T).Parallel",
+ "(*testing.common).Fail",
+ "testing.Short",
+ "N/A",
+ }
+ callNum := 0
+ for _, b := range mainPkg.Func("main").Blocks {
+ for _, instr := range b.Instrs {
+ switch instr := instr.(type) {
+ case ssa.CallInstruction:
+ call := instr.Common()
+ if want := expectedCallee[callNum]; want != "N/A" {
+ got := call.StaticCallee().String()
+ if want != got {
+ t.Errorf("call #%d from main.main: got callee %s, want %s",
+ callNum, got, want)
+ }
+ }
+ callNum++
+ }
+ }
+ }
+ if callNum != 4 {
+ t.Errorf("in main.main: got %d calls, want %d", callNum, 4)
+ }
+}
+
+// TestRuntimeTypes tests that (*Program).RuntimeTypes() includes all necessary types.
+func TestRuntimeTypes(t *testing.T) {
+ tests := []struct {
+ input string
+ want []string
+ }{
+ // An exported package-level type is needed.
+ {`package A; type T struct{}; func (T) f() {}`,
+ []string{"*p.T", "p.T"},
+ },
+ // An unexported package-level type is not needed.
+ {`package B; type t struct{}; func (t) f() {}`,
+ nil,
+ },
+ // Subcomponents of type of exported package-level var are needed.
+ {`package C; import "bytes"; var V struct {*bytes.Buffer}`,
+ []string{"*bytes.Buffer", "*struct{*bytes.Buffer}", "struct{*bytes.Buffer}"},
+ },
+ // Subcomponents of type of unexported package-level var are not needed.
+ {`package D; import "bytes"; var v struct {*bytes.Buffer}`,
+ nil,
+ },
+ // Subcomponents of type of exported package-level function are needed.
+ {`package E; import "bytes"; func F(struct {*bytes.Buffer}) {}`,
+ []string{"*bytes.Buffer", "struct{*bytes.Buffer}"},
+ },
+ // Subcomponents of type of unexported package-level function are not needed.
+ {`package F; import "bytes"; func f(struct {*bytes.Buffer}) {}`,
+ nil,
+ },
+ // Subcomponents of type of exported method of uninstantiated unexported type are not needed.
+ {`package G; import "bytes"; type x struct{}; func (x) G(struct {*bytes.Buffer}) {}; var v x`,
+ nil,
+ },
+ // ...unless used by MakeInterface.
+ {`package G2; import "bytes"; type x struct{}; func (x) G(struct {*bytes.Buffer}) {}; var v interface{} = x{}`,
+ []string{"*bytes.Buffer", "*p.x", "p.x", "struct{*bytes.Buffer}"},
+ },
+ // Subcomponents of type of exported method of exported type are needed.
+ {`package I; import "bytes"; type X struct{}; func (X) G(struct {*bytes.Buffer}) {}`,
+ []string{"*bytes.Buffer", "*p.X", "p.X", "struct{*bytes.Buffer}"},
+ },
+ // Local types aren't needed.
+ {`package J; import "bytes"; func f() { type T struct {*bytes.Buffer}; var t T; _ = t }`,
+ nil,
+ },
+ // ...unless used by MakeInterface.
+ {`package K; import "bytes"; func f() { type T struct {*bytes.Buffer}; _ = interface{}(T{}) }`,
+ []string{"*bytes.Buffer", "*p.T", "p.T"},
+ },
+ // Types used as operand of MakeInterface are needed.
+ {`package L; import "bytes"; func f() { _ = interface{}(struct{*bytes.Buffer}{}) }`,
+ []string{"*bytes.Buffer", "struct{*bytes.Buffer}"},
+ },
+ // MakeInterface is optimized away when storing to a blank.
+ {`package M; import "bytes"; var _ interface{} = struct{*bytes.Buffer}{}`,
+ nil,
+ },
+ }
+ for _, test := range tests {
+ // Parse the file.
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "input.go", test.input, 0)
+ if err != nil {
+ t.Errorf("test %q: %s", test.input[:15], err)
+ continue
+ }
+
+ // Create a single-file main package.
+ // Load dependencies from gc binary export data.
+ ssapkg, _, err := ssautil.BuildPackage(&types.Config{Importer: importer.Default()}, fset,
+ types.NewPackage("p", ""), []*ast.File{f}, ssa.SanityCheckFunctions)
+ if err != nil {
+ t.Errorf("test %q: %s", test.input[:15], err)
+ continue
+ }
+
+ var typstrs []string
+ for _, T := range ssapkg.Prog.RuntimeTypes() {
+ typstrs = append(typstrs, T.String())
+ }
+ sort.Strings(typstrs)
+
+ if !reflect.DeepEqual(typstrs, test.want) {
+ t.Errorf("test 'package %s': got %q, want %q",
+ f.Name.Name, typstrs, test.want)
+ }
+ }
+}
+
+// TestInit tests that synthesized init functions are correctly formed.
+// Bare init functions omit calls to dependent init functions and the use of
+// an init guard. They are useful in cases where the client uses a different
+// calling convention for init functions, or cases where it is easier for a
+// client to analyze bare init functions. Both of these aspects are used by
+// the llgo compiler for simpler integration with gccgo's runtime library,
+// and to simplify the analysis whereby it deduces which stores to globals
+// can be lowered to global initializers.
+func TestInit(t *testing.T) {
+ tests := []struct {
+ mode ssa.BuilderMode
+ input, want string
+ }{
+ {0, `package A; import _ "errors"; var i int = 42`,
+ `# Name: A.init
+# Package: A
+# Synthetic: package initializer
+func init():
+0: entry P:0 S:2
+ t0 = *init$guard bool
+ if t0 goto 2 else 1
+1: init.start P:1 S:1
+ *init$guard = true:bool
+ t1 = errors.init() ()
+ *i = 42:int
+ jump 2
+2: init.done P:2 S:0
+ return
+
+`},
+ {ssa.BareInits, `package B; import _ "errors"; var i int = 42`,
+ `# Name: B.init
+# Package: B
+# Synthetic: package initializer
+func init():
+0: entry P:0 S:0
+ *i = 42:int
+ return
+
+`},
+ }
+ for _, test := range tests {
+ // Create a single-file main package.
+ var conf loader.Config
+ f, err := conf.ParseFile("<input>", test.input)
+ if err != nil {
+ t.Errorf("test %q: %s", test.input[:15], err)
+ continue
+ }
+ conf.CreateFromFiles(f.Name.Name, f)
+
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Errorf("test 'package %s': Load: %s", f.Name.Name, err)
+ continue
+ }
+ prog := ssautil.CreateProgram(lprog, test.mode)
+ mainPkg := prog.Package(lprog.Created[0].Pkg)
+ prog.Build()
+ initFunc := mainPkg.Func("init")
+ if initFunc == nil {
+ t.Errorf("test 'package %s': no init function", f.Name.Name)
+ continue
+ }
+
+ var initbuf bytes.Buffer
+ _, err = initFunc.WriteTo(&initbuf)
+ if err != nil {
+ t.Errorf("test 'package %s': WriteTo: %s", f.Name.Name, err)
+ continue
+ }
+
+ if initbuf.String() != test.want {
+ t.Errorf("test 'package %s': got %s, want %s", f.Name.Name, initbuf.String(), test.want)
+ }
+ }
+}
+
+// TestSyntheticFuncs checks that the expected synthetic functions are
+// created, reachable, and not duplicated.
+func TestSyntheticFuncs(t *testing.T) {
+ const input = `package P
+type T int
+func (T) f() int
+func (*T) g() int
+var (
+ // thunks
+ a = T.f
+ b = T.f
+ c = (struct{T}).f
+ d = (struct{T}).f
+ e = (*T).g
+ f = (*T).g
+ g = (struct{*T}).g
+ h = (struct{*T}).g
+
+ // bounds
+ i = T(0).f
+ j = T(0).f
+ k = new(T).g
+ l = new(T).g
+
+ // wrappers
+ m interface{} = struct{T}{}
+ n interface{} = struct{T}{}
+ o interface{} = struct{*T}{}
+ p interface{} = struct{*T}{}
+ q interface{} = new(struct{T})
+ r interface{} = new(struct{T})
+ s interface{} = new(struct{*T})
+ t interface{} = new(struct{*T})
+)
+`
+ // Parse
+ var conf loader.Config
+ f, err := conf.ParseFile("<input>", input)
+ if err != nil {
+ t.Fatalf("parse: %v", err)
+ }
+ conf.CreateFromFiles(f.Name.Name, f)
+
+ // Load
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load: %v", err)
+ }
+
+ // Create and build SSA
+ prog := ssautil.CreateProgram(lprog, 0)
+ prog.Build()
+
+ // Enumerate reachable synthetic functions
+ want := map[string]string{
+ "(*P.T).g$bound": "bound method wrapper for func (*P.T).g() int",
+ "(P.T).f$bound": "bound method wrapper for func (P.T).f() int",
+
+ "(*P.T).g$thunk": "thunk for func (*P.T).g() int",
+ "(P.T).f$thunk": "thunk for func (P.T).f() int",
+ "(struct{*P.T}).g$thunk": "thunk for func (*P.T).g() int",
+ "(struct{P.T}).f$thunk": "thunk for func (P.T).f() int",
+
+ "(*P.T).f": "wrapper for func (P.T).f() int",
+ "(*struct{*P.T}).f": "wrapper for func (P.T).f() int",
+ "(*struct{*P.T}).g": "wrapper for func (*P.T).g() int",
+ "(*struct{P.T}).f": "wrapper for func (P.T).f() int",
+ "(*struct{P.T}).g": "wrapper for func (*P.T).g() int",
+ "(struct{*P.T}).f": "wrapper for func (P.T).f() int",
+ "(struct{*P.T}).g": "wrapper for func (*P.T).g() int",
+ "(struct{P.T}).f": "wrapper for func (P.T).f() int",
+
+ "P.init": "package initializer",
+ }
+ for fn := range ssautil.AllFunctions(prog) {
+ if fn.Synthetic == "" {
+ continue
+ }
+ name := fn.String()
+ wantDescr, ok := want[name]
+ if !ok {
+ t.Errorf("got unexpected/duplicate func: %q: %q", name, fn.Synthetic)
+ continue
+ }
+ delete(want, name)
+
+ if wantDescr != fn.Synthetic {
+ t.Errorf("(%s).Synthetic = %q, want %q", name, fn.Synthetic, wantDescr)
+ }
+ }
+ for fn, descr := range want {
+ t.Errorf("want func: %q: %q", fn, descr)
+ }
+}
+
+// TestPhiElimination ensures that dead phis, including those that
+// participate in a cycle, are properly eliminated.
+func TestPhiElimination(t *testing.T) {
+ const input = `
+package p
+
+func f() error
+
+func g(slice []int) {
+ for {
+ for range slice {
+ // e should not be lifted to a dead φ-node.
+ e := f()
+ h(e)
+ }
+ }
+}
+
+func h(error)
+`
+ // The SSA code for this function should look something like this:
+ // 0:
+ // jump 1
+ // 1:
+ // t0 = len(slice)
+ // jump 2
+ // 2:
+ // t1 = phi [1: -1:int, 3: t2]
+ // t2 = t1 + 1:int
+ // t3 = t2 < t0
+ // if t3 goto 3 else 1
+ // 3:
+ // t4 = f()
+ // t5 = h(t4)
+ // jump 2
+ //
+ // But earlier versions of the SSA construction algorithm would
+ // additionally generate this cycle of dead phis:
+ //
+ // 1:
+ // t7 = phi [0: nil:error, 2: t8] #e
+ // ...
+ // 2:
+ // t8 = phi [1: t7, 3: t4] #e
+ // ...
+
+ // Parse
+ var conf loader.Config
+ f, err := conf.ParseFile("<input>", input)
+ if err != nil {
+ t.Fatalf("parse: %v", err)
+ }
+ conf.CreateFromFiles("p", f)
+
+ // Load
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load: %v", err)
+ }
+
+ // Create and build SSA
+ prog := ssautil.CreateProgram(lprog, 0)
+ p := prog.Package(lprog.Package("p").Pkg)
+ p.Build()
+ g := p.Func("g")
+
+ phis := 0
+ for _, b := range g.Blocks {
+ for _, instr := range b.Instrs {
+ if _, ok := instr.(*ssa.Phi); ok {
+ phis++
+ }
+ }
+ }
+ if phis != 1 {
+ g.WriteTo(os.Stderr)
+ t.Errorf("expected a single Phi (for the range index), got %d", phis)
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/const.go b/vendor/golang.org/x/tools/go/ssa/const.go
new file mode 100644
index 0000000..2870eea
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/const.go
@@ -0,0 +1,169 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines the Const SSA value type.
+
+import (
+ "fmt"
+ exact "go/constant"
+ "go/token"
+ "go/types"
+ "strconv"
+)
+
+// NewConst returns a new constant of the specified value and type.
+// val must be valid according to the specification of Const.Value.
+//
+func NewConst(val exact.Value, typ types.Type) *Const {
+ return &Const{typ, val}
+}
+
+// intConst returns an 'int' constant that evaluates to i.
+// (i is an int64 in case the host is narrower than the target.)
+func intConst(i int64) *Const {
+ return NewConst(exact.MakeInt64(i), tInt)
+}
+
+// nilConst returns a nil constant of the specified type, which may
+// be any reference type, including interfaces.
+//
+func nilConst(typ types.Type) *Const {
+ return NewConst(nil, typ)
+}
+
+// stringConst returns a 'string' constant that evaluates to s.
+func stringConst(s string) *Const {
+ return NewConst(exact.MakeString(s), tString)
+}
+
+// zeroConst returns a new "zero" constant of the specified type,
+// which must not be an array or struct type: the zero values of
+// aggregates are well-defined but cannot be represented by Const.
+//
+func zeroConst(t types.Type) *Const {
+ switch t := t.(type) {
+ case *types.Basic:
+ switch {
+ case t.Info()&types.IsBoolean != 0:
+ return NewConst(exact.MakeBool(false), t)
+ case t.Info()&types.IsNumeric != 0:
+ return NewConst(exact.MakeInt64(0), t)
+ case t.Info()&types.IsString != 0:
+ return NewConst(exact.MakeString(""), t)
+ case t.Kind() == types.UnsafePointer:
+ fallthrough
+ case t.Kind() == types.UntypedNil:
+ return nilConst(t)
+ default:
+ panic(fmt.Sprint("zeroConst for unexpected type:", t))
+ }
+ case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
+ return nilConst(t)
+ case *types.Named:
+ return NewConst(zeroConst(t.Underlying()).Value, t)
+ case *types.Array, *types.Struct, *types.Tuple:
+ panic(fmt.Sprint("zeroConst applied to aggregate:", t))
+ }
+ panic(fmt.Sprint("zeroConst: unexpected ", t))
+}
+
+func (c *Const) RelString(from *types.Package) string {
+ var s string
+ if c.Value == nil {
+ s = "nil"
+ } else if c.Value.Kind() == exact.String {
+ s = exact.StringVal(c.Value)
+ const max = 20
+ // TODO(adonovan): don't cut a rune in half.
+ if len(s) > max {
+ s = s[:max-3] + "..." // abbreviate
+ }
+ s = strconv.Quote(s)
+ } else {
+ s = c.Value.String()
+ }
+ return s + ":" + relType(c.Type(), from)
+}
+
+func (c *Const) Name() string {
+ return c.RelString(nil)
+}
+
+func (c *Const) String() string {
+ return c.Name()
+}
+
+func (c *Const) Type() types.Type {
+ return c.typ
+}
+
+func (c *Const) Referrers() *[]Instruction {
+ return nil
+}
+
+func (c *Const) Parent() *Function { return nil }
+
+func (c *Const) Pos() token.Pos {
+ return token.NoPos
+}
+
+// IsNil returns true if this constant represents a typed or untyped nil value.
+func (c *Const) IsNil() bool {
+ return c.Value == nil
+}
+
+// TODO(adonovan): move everything below into golang.org/x/tools/go/ssa/interp.
+
+// Int64 returns the numeric value of this constant truncated to fit
+// a signed 64-bit integer.
+//
+func (c *Const) Int64() int64 {
+ switch x := exact.ToInt(c.Value); x.Kind() {
+ case exact.Int:
+ if i, ok := exact.Int64Val(x); ok {
+ return i
+ }
+ return 0
+ case exact.Float:
+ f, _ := exact.Float64Val(x)
+ return int64(f)
+ }
+ panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
+}
+
+// Uint64 returns the numeric value of this constant truncated to fit
+// an unsigned 64-bit integer.
+//
+func (c *Const) Uint64() uint64 {
+ switch x := exact.ToInt(c.Value); x.Kind() {
+ case exact.Int:
+ if u, ok := exact.Uint64Val(x); ok {
+ return u
+ }
+ return 0
+ case exact.Float:
+ f, _ := exact.Float64Val(x)
+ return uint64(f)
+ }
+ panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
+}
+
+// Float64 returns the numeric value of this constant truncated to fit
+// a float64.
+//
+func (c *Const) Float64() float64 {
+ f, _ := exact.Float64Val(c.Value)
+ return f
+}
+
+// Complex128 returns the complex value of this constant truncated to
+// fit a complex128.
+//
+func (c *Const) Complex128() complex128 {
+ re, _ := exact.Float64Val(exact.Real(c.Value))
+ im, _ := exact.Float64Val(exact.Imag(c.Value))
+ return complex(re, im)
+}
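+
+// A small illustrative sketch of the exported constant API; client code
+// would import go/constant (aliased as "exact" above) and go/types:
+//
+//	c := ssa.NewConst(constant.MakeInt64(42), types.Typ[types.Int])
+//	fmt.Println(c.Name())  // prints "42:int"
+//	fmt.Println(c.Int64()) // prints 42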
diff --git a/vendor/golang.org/x/tools/go/ssa/create.go b/vendor/golang.org/x/tools/go/ssa/create.go
new file mode 100644
index 0000000..69ac12b
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/create.go
@@ -0,0 +1,263 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file implements the CREATE phase of SSA construction.
+// See builder.go for explanation.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "os"
+ "sync"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+// NewProgram returns a new SSA Program.
+//
+// mode controls diagnostics and checking during SSA construction.
+//
+func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
+ prog := &Program{
+ Fset: fset,
+ imported: make(map[string]*Package),
+ packages: make(map[*types.Package]*Package),
+ thunks: make(map[selectionKey]*Function),
+ bounds: make(map[*types.Func]*Function),
+ mode: mode,
+ }
+
+ h := typeutil.MakeHasher() // protected by methodsMu, in effect
+ prog.methodSets.SetHasher(h)
+ prog.canon.SetHasher(h)
+
+ return prog
+}
+
+// memberFromObject populates package pkg with a member for the
+// typechecker object obj.
+//
+// For objects from Go source code, syntax is the associated syntax
+// tree (for funcs and vars only); it will be used during the build
+// phase.
+//
+func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
+ name := obj.Name()
+ switch obj := obj.(type) {
+ case *types.Builtin:
+ if pkg.Pkg != types.Unsafe {
+ panic("unexpected builtin object: " + obj.String())
+ }
+
+ case *types.TypeName:
+ pkg.Members[name] = &Type{
+ object: obj,
+ pkg: pkg,
+ }
+
+ case *types.Const:
+ c := &NamedConst{
+ object: obj,
+ Value: NewConst(obj.Val(), obj.Type()),
+ pkg: pkg,
+ }
+ pkg.values[obj] = c.Value
+ pkg.Members[name] = c
+
+ case *types.Var:
+ g := &Global{
+ Pkg: pkg,
+ name: name,
+ object: obj,
+ typ: types.NewPointer(obj.Type()), // address
+ pos: obj.Pos(),
+ }
+ pkg.values[obj] = g
+ pkg.Members[name] = g
+
+ case *types.Func:
+ sig := obj.Type().(*types.Signature)
+ if sig.Recv() == nil && name == "init" {
+ pkg.ninit++
+ name = fmt.Sprintf("init#%d", pkg.ninit)
+ }
+ fn := &Function{
+ name: name,
+ object: obj,
+ Signature: sig,
+ syntax: syntax,
+ pos: obj.Pos(),
+ Pkg: pkg,
+ Prog: pkg.Prog,
+ }
+ if syntax == nil {
+ fn.Synthetic = "loaded from gc object file"
+ }
+
+ pkg.values[obj] = fn
+ if sig.Recv() == nil {
+ pkg.Members[name] = fn // package-level function
+ }
+
+ default: // (incl. *types.Package)
+ panic("unexpected Object type: " + obj.String())
+ }
+}
+
+// membersFromDecl populates package pkg with members for each
+// typechecker object (var, func, const or type) associated with the
+// specified decl.
+//
+func membersFromDecl(pkg *Package, decl ast.Decl) {
+ switch decl := decl.(type) {
+ case *ast.GenDecl: // import, const, type or var
+ switch decl.Tok {
+ case token.CONST:
+ for _, spec := range decl.Specs {
+ for _, id := range spec.(*ast.ValueSpec).Names {
+ if !isBlankIdent(id) {
+ memberFromObject(pkg, pkg.info.Defs[id], nil)
+ }
+ }
+ }
+
+ case token.VAR:
+ for _, spec := range decl.Specs {
+ for _, id := range spec.(*ast.ValueSpec).Names {
+ if !isBlankIdent(id) {
+ memberFromObject(pkg, pkg.info.Defs[id], spec)
+ }
+ }
+ }
+
+ case token.TYPE:
+ for _, spec := range decl.Specs {
+ id := spec.(*ast.TypeSpec).Name
+ if !isBlankIdent(id) {
+ memberFromObject(pkg, pkg.info.Defs[id], nil)
+ }
+ }
+ }
+
+ case *ast.FuncDecl:
+ id := decl.Name
+ if !isBlankIdent(id) {
+ memberFromObject(pkg, pkg.info.Defs[id], decl)
+ }
+ }
+}
+
+// CreatePackage constructs and returns an SSA Package from the
+// specified type-checked, error-free file ASTs, and populates its
+// Members mapping.
+//
+// importable determines whether this package should be returned by a
+// subsequent call to ImportedPackage(pkg.Path()).
+//
+// The real work of building SSA form for each function is not done
+// until a subsequent call to Package.Build().
+//
+func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package {
+ p := &Package{
+ Prog: prog,
+ Members: make(map[string]Member),
+ values: make(map[types.Object]Value),
+ Pkg: pkg,
+ info: info, // transient (CREATE and BUILD phases)
+ files: files, // transient (CREATE and BUILD phases)
+ }
+
+ // Add init() function.
+ p.init = &Function{
+ name: "init",
+ Signature: new(types.Signature),
+ Synthetic: "package initializer",
+ Pkg: p,
+ Prog: prog,
+ }
+ p.Members[p.init.name] = p.init
+
+ // CREATE phase.
+ // Allocate all package members: vars, funcs, consts and types.
+ if len(files) > 0 {
+ // Go source package.
+ for _, file := range files {
+ for _, decl := range file.Decls {
+ membersFromDecl(p, decl)
+ }
+ }
+ } else {
+ // GC-compiled binary package (or "unsafe")
+ // No code.
+ // No position information.
+ scope := p.Pkg.Scope()
+ for _, name := range scope.Names() {
+ obj := scope.Lookup(name)
+ memberFromObject(p, obj, nil)
+ if obj, ok := obj.(*types.TypeName); ok {
+ if named, ok := obj.Type().(*types.Named); ok {
+ for i, n := 0, named.NumMethods(); i < n; i++ {
+ memberFromObject(p, named.Method(i), nil)
+ }
+ }
+ }
+ }
+ }
+
+ if prog.mode&BareInits == 0 {
+ // Add initializer guard variable.
+ initguard := &Global{
+ Pkg: p,
+ name: "init$guard",
+ typ: types.NewPointer(tBool),
+ }
+ p.Members[initguard.Name()] = initguard
+ }
+
+ if prog.mode&GlobalDebug != 0 {
+ p.SetDebugMode(true)
+ }
+
+ if prog.mode&PrintPackages != 0 {
+ printMu.Lock()
+ p.WriteTo(os.Stdout)
+ printMu.Unlock()
+ }
+
+ if importable {
+ prog.imported[p.Pkg.Path()] = p
+ }
+ prog.packages[p.Pkg] = p
+
+ return p
+}
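+
+// An illustrative sketch of driving CreatePackage by hand (most clients
+// use ssautil.CreateProgram or ssautil.BuildPackage instead). fset,
+// files, pkg and info are assumed to come from a prior go/parser and
+// go/types run, and CreatePackage must also be called for each of pkg's
+// direct imports before Build:
+//
+//	prog := ssa.NewProgram(fset, ssa.SanityCheckFunctions)
+//	ssapkg := prog.CreatePackage(pkg, files, info, true)
+//	ssapkg.Build()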
+
+// printMu serializes printing of Packages/Functions to stdout.
+var printMu sync.Mutex
+
+// AllPackages returns a new slice containing all packages in the
+// program prog in unspecified order.
+//
+func (prog *Program) AllPackages() []*Package {
+ pkgs := make([]*Package, 0, len(prog.packages))
+ for _, pkg := range prog.packages {
+ pkgs = append(pkgs, pkg)
+ }
+ return pkgs
+}
+
+// ImportedPackage returns the importable SSA Package whose import
+// path is path, or nil if no such SSA package has been created.
+//
+// Not all packages are importable. For example, no import
+// declaration can resolve to the x_test package created by 'go test'
+// or the ad-hoc main package created by 'go build foo.go'.
+//
+func (prog *Program) ImportedPackage(path string) *Package {
+ return prog.imported[path]
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/doc.go b/vendor/golang.org/x/tools/go/ssa/doc.go
new file mode 100644
index 0000000..2aa04f4
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/doc.go
@@ -0,0 +1,123 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package ssa defines a representation of the elements of Go programs
+// (packages, types, functions, variables and constants) using a
+// static single-assignment (SSA) form intermediate representation
+// (IR) for the bodies of functions.
+//
+// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE.
+//
+// For an introduction to SSA form, see
+// http://en.wikipedia.org/wiki/Static_single_assignment_form.
+// This page provides a broader reading list:
+// http://www.dcs.gla.ac.uk/~jsinger/ssa.html.
+//
+// The level of abstraction of the SSA form is intentionally close to
+// the source language to facilitate construction of source analysis
+// tools. It is not intended for machine code generation.
+//
+// All looping, branching and switching constructs are replaced with
+// unstructured control flow. Higher-level control flow constructs
+// such as multi-way branch can be reconstructed as needed; see
+// ssautil.Switches() for an example.
+//
+// To construct an SSA-form program, call ssautil.CreateProgram on a
+// loader.Program, a set of type-checked packages created from
+// parsed Go source files. The resulting ssa.Program contains all the
+// packages and their members, but SSA code is not created for
+// function bodies until a subsequent call to (*Package).Build.
+//
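+// For example, a minimal sketch (assuming conf is a loader.Config that
+// has already been populated with the packages of interest):
+//
+//	lprog, err := conf.Load() // parse and type-check
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	prog := ssautil.CreateProgram(lprog, ssa.SanityCheckFunctions)
+//	prog.Build() // build SSA for every function body
+//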
+// The builder initially builds a naive SSA form in which all local
+// variables are addresses of stack locations with explicit loads and
+// stores. Registerisation of eligible locals and φ-node insertion
+// using dominance and dataflow are then performed as a second pass
+// called "lifting" to improve the accuracy and performance of
+// subsequent analyses; this pass can be skipped by setting the
+// NaiveForm builder flag.
+//
+// The primary interfaces of this package are:
+//
+// - Member: a named member of a Go package.
+// - Value: an expression that yields a value.
+// - Instruction: a statement that consumes values and performs computation.
+// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph)
+//
+// A computation that yields a result implements both the Value and
+// Instruction interfaces. The following table shows for each
+// concrete type which of these interfaces it implements.
+//
+// Value? Instruction? Member?
+// *Alloc ✔ ✔
+// *BinOp ✔ ✔
+// *Builtin ✔
+// *Call ✔ ✔
+// *ChangeInterface ✔ ✔
+// *ChangeType ✔ ✔
+// *Const ✔
+// *Convert ✔ ✔
+// *DebugRef ✔
+// *Defer ✔
+// *Extract ✔ ✔
+// *Field ✔ ✔
+// *FieldAddr ✔ ✔
+// *FreeVar ✔
+// *Function ✔ ✔ (func)
+// *Global ✔ ✔ (var)
+// *Go ✔
+// *If ✔
+// *Index ✔ ✔
+// *IndexAddr ✔ ✔
+// *Jump ✔
+// *Lookup ✔ ✔
+// *MakeChan ✔ ✔
+// *MakeClosure ✔ ✔
+// *MakeInterface ✔ ✔
+// *MakeMap ✔ ✔
+// *MakeSlice ✔ ✔
+// *MapUpdate ✔
+// *NamedConst ✔ (const)
+// *Next ✔ ✔
+// *Panic ✔
+// *Parameter ✔
+// *Phi ✔ ✔
+// *Range ✔ ✔
+// *Return ✔
+// *RunDefers ✔
+// *Select ✔ ✔
+// *Send ✔
+// *Slice ✔ ✔
+// *Store ✔
+// *Type ✔ (type)
+// *TypeAssert ✔ ✔
+// *UnOp ✔ ✔
+//
+// Other key types in this package include: Program, Package, Function
+// and BasicBlock.
+//
+// The program representation constructed by this package is fully
+// resolved internally, i.e. it does not rely on the names of Values,
+// Packages, Functions, Types or BasicBlocks for the correct
+// interpretation of the program. Only the identities of objects and
+// the topology of the SSA and type graphs are semantically
+// significant. (There is one exception: Ids, used to identify field
+// and method names, contain strings.) Avoidance of name-based
+// operations simplifies the implementation of subsequent passes and
+// can make them very efficient. Many objects are nonetheless named
+// to aid in debugging, but it is not essential that the names be
+// either accurate or unambiguous. The public API exposes a number of
+// name-based maps for client convenience.
+//
+// The ssa/ssautil package provides various utilities that depend only
+// on the public API of this package.
+//
+// TODO(adonovan): Consider the exceptional control-flow implications
+// of defer and recover().
+//
+// TODO(adonovan): write a how-to document for all the various cases
+// of trying to determine corresponding elements across the four
+// domains of source locations, ast.Nodes, types.Objects,
+// ssa.Values/Instructions.
+//
+package ssa // import "golang.org/x/tools/go/ssa"
diff --git a/vendor/golang.org/x/tools/go/ssa/dom.go b/vendor/golang.org/x/tools/go/ssa/dom.go
new file mode 100644
index 0000000..12ef430
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/dom.go
@@ -0,0 +1,341 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines algorithms related to dominance.
+
+// Dominator tree construction ----------------------------------------
+//
+// We use the algorithm described in Lengauer & Tarjan. 1979. A fast
+// algorithm for finding dominators in a flowgraph.
+// http://doi.acm.org/10.1145/357062.357071
+//
+// We also apply the optimizations to SLT described in Georgiadis et
+// al, Finding Dominators in Practice, JGAA 2006,
+// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf
+// to avoid the need for buckets of size > 1.
+
+import (
+ "bytes"
+ "fmt"
+ "math/big"
+ "os"
+ "sort"
+)
+
+// Idom returns the block that immediately dominates b:
+// its parent in the dominator tree, if any.
+// Neither the entry node (b.Index==0) nor the recover node
+// (b==b.Parent().Recover()) has a parent.
+//
+func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom }
+
+// Dominees returns the list of blocks that b immediately dominates:
+// its children in the dominator tree.
+//
+func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children }
+
+// Dominates reports whether b dominates c.
+func (b *BasicBlock) Dominates(c *BasicBlock) bool {
+ return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post
+}
+
+type byDomPreorder []*BasicBlock
+
+func (a byDomPreorder) Len() int { return len(a) }
+func (a byDomPreorder) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre }
+
+// DomPreorder returns a new slice containing the blocks of f in
+// dominator tree preorder.
+//
+func (f *Function) DomPreorder() []*BasicBlock {
+ n := len(f.Blocks)
+ order := make(byDomPreorder, n, n)
+ copy(order, f.Blocks)
+ sort.Sort(order)
+ return order
+}
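+
+// A small illustrative sketch (fn is assumed to be any *Function that
+// has already been built): walk the blocks in dominator tree preorder,
+// printing each block's immediate dominator and dominees.
+//
+//	for _, b := range fn.DomPreorder() {
+//		fmt.Println(b, "idom:", b.Idom(), "dominees:", b.Dominees())
+//	}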
+
+// domInfo contains a BasicBlock's dominance information.
+type domInfo struct {
+ idom *BasicBlock // immediate dominator (parent in domtree)
+ children []*BasicBlock // nodes immediately dominated by this one
+ pre, post int32 // pre- and post-order numbering within domtree
+}
+
+// ltState holds the working state for the Lengauer-Tarjan algorithm
+// (during which domInfo.pre is repurposed as the CFG DFS preorder number).
+type ltState struct {
+ // Each slice is indexed by b.Index.
+ sdom []*BasicBlock // b's semidominator
+ parent []*BasicBlock // b's parent in DFS traversal of CFG
+ ancestor []*BasicBlock // b's ancestor with least sdom
+}
+
+// dfs implements the depth-first search part of the LT algorithm.
+func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 {
+ preorder[i] = v
+ v.dom.pre = i // For now: DFS preorder of spanning tree of CFG
+ i++
+ lt.sdom[v.Index] = v
+ lt.link(nil, v)
+ for _, w := range v.Succs {
+ if lt.sdom[w.Index] == nil {
+ lt.parent[w.Index] = v
+ i = lt.dfs(w, i, preorder)
+ }
+ }
+ return i
+}
+
+// eval implements the EVAL part of the LT algorithm.
+func (lt *ltState) eval(v *BasicBlock) *BasicBlock {
+ // TODO(adonovan): opt: do path compression per simple LT.
+ u := v
+ for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] {
+ if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre {
+ u = v
+ }
+ }
+ return u
+}
+
+// link implements the LINK part of the LT algorithm.
+func (lt *ltState) link(v, w *BasicBlock) {
+ lt.ancestor[w.Index] = v
+}
+
+// buildDomTree computes the dominator tree of f using the LT algorithm.
+// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run).
+//
+func buildDomTree(f *Function) {
+ // The step numbers refer to the original LT paper; the
+ // reordering is due to Georgiadis.
+
+ // Clear any previous domInfo.
+ for _, b := range f.Blocks {
+ b.dom = domInfo{}
+ }
+
+ n := len(f.Blocks)
+ // Allocate space for 5 contiguous [n]*BasicBlock arrays:
+ // sdom, parent, ancestor, preorder, buckets.
+ space := make([]*BasicBlock, 5*n, 5*n)
+ lt := ltState{
+ sdom: space[0:n],
+ parent: space[n : 2*n],
+ ancestor: space[2*n : 3*n],
+ }
+
+ // Step 1. Number vertices by depth-first preorder.
+ preorder := space[3*n : 4*n]
+ root := f.Blocks[0]
+ prenum := lt.dfs(root, 0, preorder)
+ recover := f.Recover
+ if recover != nil {
+ lt.dfs(recover, prenum, preorder)
+ }
+
+ buckets := space[4*n : 5*n]
+ copy(buckets, preorder)
+
+ // In reverse preorder...
+ for i := int32(n) - 1; i > 0; i-- {
+ w := preorder[i]
+
+ // Step 3. Implicitly define the immediate dominator of each node.
+ for v := buckets[i]; v != w; v = buckets[v.dom.pre] {
+ u := lt.eval(v)
+ if lt.sdom[u.Index].dom.pre < i {
+ v.dom.idom = u
+ } else {
+ v.dom.idom = w
+ }
+ }
+
+ // Step 2. Compute the semidominators of all nodes.
+ lt.sdom[w.Index] = lt.parent[w.Index]
+ for _, v := range w.Preds {
+ u := lt.eval(v)
+ if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre {
+ lt.sdom[w.Index] = lt.sdom[u.Index]
+ }
+ }
+
+ lt.link(lt.parent[w.Index], w)
+
+ if lt.parent[w.Index] == lt.sdom[w.Index] {
+ w.dom.idom = lt.parent[w.Index]
+ } else {
+ buckets[i] = buckets[lt.sdom[w.Index].dom.pre]
+ buckets[lt.sdom[w.Index].dom.pre] = w
+ }
+ }
+
+ // The final 'Step 3' is now outside the loop.
+ for v := buckets[0]; v != root; v = buckets[v.dom.pre] {
+ v.dom.idom = root
+ }
+
+ // Step 4. Explicitly define the immediate dominator of each
+ // node, in preorder.
+ for _, w := range preorder[1:] {
+ if w == root || w == recover {
+ w.dom.idom = nil
+ } else {
+ if w.dom.idom != lt.sdom[w.Index] {
+ w.dom.idom = w.dom.idom.dom.idom
+ }
+ // Calculate Children relation as inverse of Idom.
+ w.dom.idom.dom.children = append(w.dom.idom.dom.children, w)
+ }
+ }
+
+ pre, post := numberDomTree(root, 0, 0)
+ if recover != nil {
+ numberDomTree(recover, pre, post)
+ }
+
+ // printDomTreeDot(os.Stderr, f) // debugging
+ // printDomTreeText(os.Stderr, root, 0) // debugging
+
+ if f.Prog.mode&SanityCheckFunctions != 0 {
+ sanityCheckDomTree(f)
+ }
+}
+
+// numberDomTree sets the pre- and post-order numbers of a depth-first
+// traversal of the dominator tree rooted at v. These are used to
+// answer dominance queries in constant time.
+//
+func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
+ v.dom.pre = pre
+ pre++
+ for _, child := range v.dom.children {
+ pre, post = numberDomTree(child, pre, post)
+ }
+ v.dom.post = post
+ post++
+ return pre, post
+}
+
+// Testing utilities ----------------------------------------
+
+// sanityCheckDomTree checks the correctness of the dominator tree
+// computed by the LT algorithm by comparing against the dominance
+// relation computed by a naive Kildall-style forward dataflow
+// analysis (Algorithm 10.16 from the "Dragon" book).
+//
+func sanityCheckDomTree(f *Function) {
+ n := len(f.Blocks)
+
+ // D[i] is the set of blocks that dominate f.Blocks[i],
+ // represented as a bit-set of block indices.
+ D := make([]big.Int, n)
+
+ one := big.NewInt(1)
+
+ // all is the set of all blocks; constant.
+ var all big.Int
+ all.Set(one).Lsh(&all, uint(n)).Sub(&all, one)
+
+ // Initialization.
+ for i, b := range f.Blocks {
+ if i == 0 || b == f.Recover {
+ // A root is dominated only by itself.
+ D[i].SetBit(&D[0], 0, 1)
+ } else {
+ // All other blocks are (initially) dominated
+ // by every block.
+ D[i].Set(&all)
+ }
+ }
+
+ // Iteration until fixed point.
+ for changed := true; changed; {
+ changed = false
+ for i, b := range f.Blocks {
+ if i == 0 || b == f.Recover {
+ continue
+ }
+ // Compute intersection across predecessors.
+ var x big.Int
+ x.Set(&all)
+ for _, pred := range b.Preds {
+ x.And(&x, &D[pred.Index])
+ }
+ x.SetBit(&x, i, 1) // a block always dominates itself.
+ if D[i].Cmp(&x) != 0 {
+ D[i].Set(&x)
+ changed = true
+ }
+ }
+ }
+
+ // Check the entire relation. O(n^2).
+ // The Recover block (if any) must be treated specially so we skip it.
+ ok := true
+ for i := 0; i < n; i++ {
+ for j := 0; j < n; j++ {
+ b, c := f.Blocks[i], f.Blocks[j]
+ if c == f.Recover {
+ continue
+ }
+ actual := b.Dominates(c)
+ expected := D[j].Bit(i) == 1
+ if actual != expected {
+ fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected)
+ ok = false
+ }
+ }
+ }
+
+ preorder := f.DomPreorder()
+ for _, b := range f.Blocks {
+ if got := preorder[b.dom.pre]; got != b {
+ fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b)
+ ok = false
+ }
+ }
+
+ if !ok {
+ panic("sanityCheckDomTree failed for " + f.String())
+ }
+
+}
+
+// Printing functions ----------------------------------------
+
+// printDomTreeText prints the dominator tree as text, using indentation.
+func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) {
+ fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v)
+ for _, child := range v.dom.children {
+ printDomTreeText(buf, child, indent+1)
+ }
+}
+
+// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
+// (.dot) format.
+func printDomTreeDot(buf *bytes.Buffer, f *Function) {
+ fmt.Fprintln(buf, "//", f)
+ fmt.Fprintln(buf, "digraph domtree {")
+ for i, b := range f.Blocks {
+ v := b.dom
+ fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post)
+ // TODO(adonovan): improve appearance of edges
+ // belonging to both dominator tree and CFG.
+
+ // Dominator tree edge.
+ if i != 0 {
+ fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre)
+ }
+ // CFG edges.
+ for _, pred := range b.Preds {
+ fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre)
+ }
+ }
+ fmt.Fprintln(buf, "}")
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/emit.go b/vendor/golang.org/x/tools/go/ssa/emit.go
new file mode 100644
index 0000000..1036988
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/emit.go
@@ -0,0 +1,468 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// Helpers for emitting SSA instructions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+)
+
+// emitNew emits to f a new (heap Alloc) instruction allocating an
+// object of type typ. pos is the optional source location.
+//
+func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc {
+ v := &Alloc{Heap: true}
+ v.setType(types.NewPointer(typ))
+ v.setPos(pos)
+ f.emit(v)
+ return v
+}
+
+// emitLoad emits to f an instruction to load the address addr into a
+// new temporary, and returns the value so defined.
+//
+func emitLoad(f *Function, addr Value) *UnOp {
+ v := &UnOp{Op: token.MUL, X: addr}
+ v.setType(deref(addr.Type()))
+ f.emit(v)
+ return v
+}
+
+// emitDebugRef emits to f a DebugRef pseudo-instruction associating
+// expression e with value v.
+//
+func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) {
+ if !f.debugInfo() {
+ return // debugging not enabled
+ }
+ if v == nil || e == nil {
+ panic("nil")
+ }
+ var obj types.Object
+ e = unparen(e)
+ if id, ok := e.(*ast.Ident); ok {
+ if isBlankIdent(id) {
+ return
+ }
+ obj = f.Pkg.objectOf(id)
+ switch obj.(type) {
+ case *types.Nil, *types.Const, *types.Builtin:
+ return
+ }
+ }
+ f.emit(&DebugRef{
+ X: v,
+ Expr: e,
+ IsAddr: isAddr,
+ object: obj,
+ })
+}
+
+// emitArith emits to f code to compute the binary operation op(x, y)
+// where op is an eager shift, logical or arithmetic operation.
+// (Use emitCompare() for comparisons and Builder.logicalBinop() for
+// non-eager operations.)
+//
+func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value {
+ switch op {
+ case token.SHL, token.SHR:
+ x = emitConv(f, x, t)
+ // y may be signed or an 'untyped' constant.
+ // TODO(adonovan): whence signed values?
+ if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 {
+ y = emitConv(f, y, types.Typ[types.Uint64])
+ }
+
+ case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
+ x = emitConv(f, x, t)
+ y = emitConv(f, y, t)
+
+ default:
+ panic("illegal op in emitArith: " + op.String())
+
+ }
+ v := &BinOp{
+ Op: op,
+ X: x,
+ Y: y,
+ }
+ v.setPos(pos)
+ v.setType(t)
+ return f.emit(v)
+}
+
+// emitCompare emits to f code to compute the boolean result of the
+// comparison 'x op y'.
+//
+func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
+ xt := x.Type().Underlying()
+ yt := y.Type().Underlying()
+
+ // Special case to optimise a tagless SwitchStmt so that
+ // these are equivalent
+ // switch { case e: ...}
+ // switch true { case e: ... }
+ // if e==true { ... }
+ // even in the case when e's type is an interface.
+ // TODO(adonovan): opt: generalise to x==true, false!=y, etc.
+ if x == vTrue && op == token.EQL {
+ if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 {
+ return y
+ }
+ }
+
+ if types.Identical(xt, yt) {
+ // no conversion necessary
+ } else if _, ok := xt.(*types.Interface); ok {
+ y = emitConv(f, y, x.Type())
+ } else if _, ok := yt.(*types.Interface); ok {
+ x = emitConv(f, x, y.Type())
+ } else if _, ok := x.(*Const); ok {
+ x = emitConv(f, x, y.Type())
+ } else if _, ok := y.(*Const); ok {
+ y = emitConv(f, y, x.Type())
+ } else {
+ // other cases, e.g. channels. No-op.
+ }
+
+ v := &BinOp{
+ Op: op,
+ X: x,
+ Y: y,
+ }
+ v.setPos(pos)
+ v.setType(tBool)
+ return f.emit(v)
+}
+
+// isValuePreserving returns true if a conversion from ut_src to
+// ut_dst is value-preserving, i.e. just a change of type.
+// Precondition: neither argument is a named type.
+//
+func isValuePreserving(ut_src, ut_dst types.Type) bool {
+ // Identical underlying types?
+ if structTypesIdentical(ut_dst, ut_src) {
+ return true
+ }
+
+ switch ut_dst.(type) {
+ case *types.Chan:
+ // Conversion between channel types?
+ _, ok := ut_src.(*types.Chan)
+ return ok
+
+ case *types.Pointer:
+ // Conversion between pointers with identical base types?
+ _, ok := ut_src.(*types.Pointer)
+ return ok
+ }
+ return false
+}
+
+// emitConv emits to f code to convert Value val to exactly type typ,
+// and returns the converted value. Implicit conversions are required
+// by language assignability rules in assignments, parameter passing,
+// etc. Conversions cannot fail dynamically.
+//
+func emitConv(f *Function, val Value, typ types.Type) Value {
+ t_src := val.Type()
+
+ // Identical types? Conversion is a no-op.
+ if types.Identical(t_src, typ) {
+ return val
+ }
+
+ ut_dst := typ.Underlying()
+ ut_src := t_src.Underlying()
+
+ // Just a change of type, but not value or representation?
+ if isValuePreserving(ut_src, ut_dst) {
+ c := &ChangeType{X: val}
+ c.setType(typ)
+ return f.emit(c)
+ }
+
+ // Conversion to, or construction of a value of, an interface type?
+ if _, ok := ut_dst.(*types.Interface); ok {
+ // Assignment from one interface type to another?
+ if _, ok := ut_src.(*types.Interface); ok {
+ c := &ChangeInterface{X: val}
+ c.setType(typ)
+ return f.emit(c)
+ }
+
+ // Untyped nil constant? Return interface-typed nil constant.
+ if ut_src == tUntypedNil {
+ return nilConst(typ)
+ }
+
+ // Convert (non-nil) "untyped" literals to their default type.
+ if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 {
+ val = emitConv(f, val, DefaultType(ut_src))
+ }
+
+ f.Pkg.Prog.needMethodsOf(val.Type())
+ mi := &MakeInterface{X: val}
+ mi.setType(typ)
+ return f.emit(mi)
+ }
+
+ // Conversion of a compile-time constant value?
+ if c, ok := val.(*Const); ok {
+ if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() {
+ // Conversion of a compile-time constant to
+ // another constant type results in a new
+ // constant of the destination type and
+ // (initially) the same abstract value.
+ // We don't truncate the value yet.
+ return NewConst(c.Value, typ)
+ }
+
+ // We're converting from constant to non-constant type,
+ // e.g. string -> []byte/[]rune.
+ }
+
+ // A representation-changing conversion?
+ // At least one of {ut_src,ut_dst} must be *Basic.
+ // (The other may be []byte or []rune.)
+ _, ok1 := ut_src.(*types.Basic)
+ _, ok2 := ut_dst.(*types.Basic)
+ if ok1 || ok2 {
+ c := &Convert{X: val}
+ c.setType(typ)
+ return f.emit(c)
+ }
+
+ panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ))
+}
+
+// emitStore emits to f an instruction to store value val at location
+// addr, applying implicit conversions as required by assignability rules.
+//
+func emitStore(f *Function, addr, val Value, pos token.Pos) *Store {
+ s := &Store{
+ Addr: addr,
+ Val: emitConv(f, val, deref(addr.Type())),
+ pos: pos,
+ }
+ f.emit(s)
+ return s
+}
+
+// emitJump emits to f a jump to target, and updates the control-flow graph.
+// Postcondition: f.currentBlock is nil.
+//
+func emitJump(f *Function, target *BasicBlock) {
+ b := f.currentBlock
+ b.emit(new(Jump))
+ addEdge(b, target)
+ f.currentBlock = nil
+}
+
+// emitIf emits to f a conditional jump to tblock or fblock based on
+// cond, and updates the control-flow graph.
+// Postcondition: f.currentBlock is nil.
+//
+func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) {
+ b := f.currentBlock
+ b.emit(&If{Cond: cond})
+ addEdge(b, tblock)
+ addEdge(b, fblock)
+ f.currentBlock = nil
+}
+
+// emitExtract emits to f an instruction to extract the index'th
+// component of tuple. It returns the extracted value.
+//
+func emitExtract(f *Function, tuple Value, index int) Value {
+ e := &Extract{Tuple: tuple, Index: index}
+ e.setType(tuple.Type().(*types.Tuple).At(index).Type())
+ return f.emit(e)
+}
+
+// emitTypeAssert emits to f a type assertion value := x.(t) and
+// returns the value. x.Type() must be an interface.
+//
+func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value {
+ a := &TypeAssert{X: x, AssertedType: t}
+ a.setPos(pos)
+ a.setType(t)
+ return f.emit(a)
+}
+
+// emitTypeTest emits to f a type test value,ok := x.(t) and returns
+// a (value, ok) tuple. x.Type() must be an interface.
+//
+func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value {
+ a := &TypeAssert{
+ X: x,
+ AssertedType: t,
+ CommaOk: true,
+ }
+ a.setPos(pos)
+ a.setType(types.NewTuple(
+ newVar("value", t),
+ varOk,
+ ))
+ return f.emit(a)
+}
+
+// emitTailCall emits to f a function call in tail position. The
+// caller is responsible for all fields of 'call' except its type.
+// Intended for wrapper methods.
+// Precondition: f does/will not use deferred procedure calls.
+// Postcondition: f.currentBlock is nil.
+//
+func emitTailCall(f *Function, call *Call) {
+ tresults := f.Signature.Results()
+ nr := tresults.Len()
+ if nr == 1 {
+ call.typ = tresults.At(0).Type()
+ } else {
+ call.typ = tresults
+ }
+ tuple := f.emit(call)
+ var ret Return
+ switch nr {
+ case 0:
+ // no-op
+ case 1:
+ ret.Results = []Value{tuple}
+ default:
+ for i := 0; i < nr; i++ {
+ v := emitExtract(f, tuple, i)
+ // TODO(adonovan): in principle, this is required:
+ // v = emitConv(f, o.Type, f.Signature.Results[i].Type)
+ // but in practice emitTailCall is only used when
+ // the types exactly match.
+ ret.Results = append(ret.Results, v)
+ }
+ }
+ f.emit(&ret)
+ f.currentBlock = nil
+}
+
+// emitImplicitSelections emits to f code to apply the sequence of
+// implicit field selections specified by indices to base value v, and
+// returns the selected value.
+//
+// If v is the address of a struct, the result will be the address of
+// a field; if it is the value of a struct, the result will be the
+// value of a field.
+//
+func emitImplicitSelections(f *Function, v Value, indices []int) Value {
+ for _, index := range indices {
+ fld := deref(v.Type()).Underlying().(*types.Struct).Field(index)
+
+ if isPointer(v.Type()) {
+ instr := &FieldAddr{
+ X: v,
+ Field: index,
+ }
+ instr.setType(types.NewPointer(fld.Type()))
+ v = f.emit(instr)
+ // Load the field's value iff indirectly embedded.
+ if isPointer(fld.Type()) {
+ v = emitLoad(f, v)
+ }
+ } else {
+ instr := &Field{
+ X: v,
+ Field: index,
+ }
+ instr.setType(fld.Type())
+ v = f.emit(instr)
+ }
+ }
+ return v
+}
+
+// emitFieldSelection emits to f code to select the index'th field of v.
+//
+// If wantAddr, the input must be a pointer-to-struct and the result
+// will be the field's address; otherwise the result will be the
+// field's value.
+// Ident id is used for position and debug info.
+//
+func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value {
+ fld := deref(v.Type()).Underlying().(*types.Struct).Field(index)
+ if isPointer(v.Type()) {
+ instr := &FieldAddr{
+ X: v,
+ Field: index,
+ }
+ instr.setPos(id.Pos())
+ instr.setType(types.NewPointer(fld.Type()))
+ v = f.emit(instr)
+ // Load the field's value iff we don't want its address.
+ if !wantAddr {
+ v = emitLoad(f, v)
+ }
+ } else {
+ instr := &Field{
+ X: v,
+ Field: index,
+ }
+ instr.setPos(id.Pos())
+ instr.setType(fld.Type())
+ v = f.emit(instr)
+ }
+ emitDebugRef(f, id, v, wantAddr)
+ return v
+}
+
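+// For example, given the illustrative types
+//
+//	type B struct{ N int }
+//	type A struct{ B }
+//
+// a promoted selection a.N (for a of type *A) is built by first applying
+// the implicit index path {0} via emitImplicitSelections to reach the
+// embedded B, after which emitFieldSelection selects N itself.
+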
+// zeroValue emits to f code to produce a zero value of type t,
+// and returns it.
+//
+func zeroValue(f *Function, t types.Type) Value {
+ switch t.Underlying().(type) {
+ case *types.Struct, *types.Array:
+ return emitLoad(f, f.addLocal(t, token.NoPos))
+ default:
+ return zeroConst(t)
+ }
+}
+
+// createRecoverBlock emits to f a block of code to return after a
+// recovered panic, and sets f.Recover to it.
+//
+// If f's result parameters are named, the code loads and returns
+// their current values; otherwise it returns the zero values of their
+// types.
+//
+// Idempotent.
+//
+func createRecoverBlock(f *Function) {
+ if f.Recover != nil {
+ return // already created
+ }
+ saved := f.currentBlock
+
+ f.Recover = f.newBasicBlock("recover")
+ f.currentBlock = f.Recover
+
+ var results []Value
+ if f.namedResults != nil {
+		// Reload the named result parameters (NRPs) to form the value tuple.
+ for _, r := range f.namedResults {
+ results = append(results, emitLoad(f, r))
+ }
+ } else {
+ R := f.Signature.Results()
+ for i, n := 0, R.Len(); i < n; i++ {
+ T := R.At(i).Type()
+
+ // Return zero value of each result type.
+ results = append(results, zeroValue(f, T))
+ }
+ }
+ f.emit(&Return{Results: results})
+
+ f.currentBlock = saved
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/example_test.go b/vendor/golang.org/x/tools/go/ssa/example_test.go
new file mode 100644
index 0000000..31fa561
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/example_test.go
@@ -0,0 +1,138 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa_test
+
+import (
+ "fmt"
+ "go/ast"
+ "go/importer"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "os"
+
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+const hello = `
+package main
+
+import "fmt"
+
+const message = "Hello, World!"
+
+func main() {
+ fmt.Println(message)
+}
+`
+
+// This program demonstrates how to run the SSA builder on a single
+// package of one or more already-parsed files. Its dependencies are
+// loaded from compiler export data. This is what you'd typically use
+// for a compiler; it does not depend on golang.org/x/tools/go/loader.
+//
+// It shows the printed representation of packages, functions, and
+// instructions. Within the function listing, the name of each
+// BasicBlock such as ".0.entry" is printed left-aligned, followed by
+// the block's Instructions.
+//
+// For each instruction that defines an SSA virtual register
+// (i.e. implements Value), the type of that value is shown in the
+// right column.
+//
+// Build and run the ssadump.go program if you want a standalone tool
+// with similar functionality. It is located at
+// golang.org/x/tools/cmd/ssadump.
+//
+func ExampleBuildPackage() {
+ // Parse the source files.
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "hello.go", hello, parser.ParseComments)
+ if err != nil {
+ fmt.Print(err) // parse error
+ return
+ }
+ files := []*ast.File{f}
+
+ // Create the type-checker's package.
+ pkg := types.NewPackage("hello", "")
+
+ // Type-check the package, load dependencies.
+ // Create and build the SSA program.
+ hello, _, err := ssautil.BuildPackage(
+ &types.Config{Importer: importer.Default()}, fset, pkg, files, ssa.SanityCheckFunctions)
+ if err != nil {
+ fmt.Print(err) // type error in some package
+ return
+ }
+
+ // Print out the package.
+ hello.WriteTo(os.Stdout)
+
+ // Print out the package-level functions.
+ hello.Func("init").WriteTo(os.Stdout)
+ hello.Func("main").WriteTo(os.Stdout)
+
+ // Output:
+ //
+ // package hello:
+ // func init func()
+ // var init$guard bool
+ // func main func()
+ // const message message = "Hello, World!":untyped string
+ //
+ // # Name: hello.init
+ // # Package: hello
+ // # Synthetic: package initializer
+ // func init():
+ // 0: entry P:0 S:2
+ // t0 = *init$guard bool
+ // if t0 goto 2 else 1
+ // 1: init.start P:1 S:1
+ // *init$guard = true:bool
+ // t1 = fmt.init() ()
+ // jump 2
+ // 2: init.done P:2 S:0
+ // return
+ //
+ // # Name: hello.main
+ // # Package: hello
+ // # Location: hello.go:8:6
+ // func main():
+ // 0: entry P:0 S:0
+ // t0 = new [1]interface{} (varargs) *[1]interface{}
+ // t1 = &t0[0:int] *interface{}
+ // t2 = make interface{} <- string ("Hello, World!":string) interface{}
+ // *t1 = t2
+ // t3 = slice t0[:] []interface{}
+ // t4 = fmt.Println(t3...) (n int, err error)
+ // return
+}
+
+// This program shows how to load a main package (cmd/cover) and all its
+// dependencies from source, using the loader, and then build SSA code
+// for the entire program. This is what you'd typically use for a
+// whole-program analysis.
+//
+func ExampleLoadProgram() {
+ // Load cmd/cover and its dependencies.
+ var conf loader.Config
+ conf.Import("cmd/cover")
+ lprog, err := conf.Load()
+ if err != nil {
+ fmt.Print(err) // type error in some package
+ return
+ }
+
+ // Create SSA-form program representation.
+ prog := ssautil.CreateProgram(lprog, ssa.SanityCheckFunctions)
+
+ // Build SSA code for the entire cmd/cover program.
+ prog.Build()
+
+ // Output:
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/func.go b/vendor/golang.org/x/tools/go/ssa/func.go
new file mode 100644
index 0000000..b21ff4e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/func.go
@@ -0,0 +1,689 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file implements the Function and BasicBlock types.
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "io"
+ "os"
+ "strings"
+)
+
+// addEdge adds a control-flow graph edge from the block 'from' to the block 'to'.
+func addEdge(from, to *BasicBlock) {
+ from.Succs = append(from.Succs, to)
+ to.Preds = append(to.Preds, from)
+}
+
+// Parent returns the function that contains block b.
+func (b *BasicBlock) Parent() *Function { return b.parent }
+
+// String returns a human-readable label of this block.
+// It is not guaranteed unique within the function.
+//
+func (b *BasicBlock) String() string {
+ return fmt.Sprintf("%d", b.Index)
+}
+
+// emit appends an instruction to the current basic block.
+// If the instruction defines a Value, it is returned.
+//
+func (b *BasicBlock) emit(i Instruction) Value {
+ i.setBlock(b)
+ b.Instrs = append(b.Instrs, i)
+ v, _ := i.(Value)
+ return v
+}
+
+// predIndex returns the i such that b.Preds[i] == c or panics if
+// there is none.
+func (b *BasicBlock) predIndex(c *BasicBlock) int {
+ for i, pred := range b.Preds {
+ if pred == c {
+ return i
+ }
+ }
+ panic(fmt.Sprintf("no edge %s -> %s", c, b))
+}
+
+// hasPhi returns true if b.Instrs contains φ-nodes.
+func (b *BasicBlock) hasPhi() bool {
+ _, ok := b.Instrs[0].(*Phi)
+ return ok
+}
+
+// phis returns the prefix of b.Instrs containing all the block's φ-nodes.
+func (b *BasicBlock) phis() []Instruction {
+ for i, instr := range b.Instrs {
+ if _, ok := instr.(*Phi); !ok {
+ return b.Instrs[:i]
+ }
+ }
+ return nil // unreachable in well-formed blocks
+}
+
+// replacePred replaces all occurrences of p in b's predecessor list with q.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) replacePred(p, q *BasicBlock) {
+ for i, pred := range b.Preds {
+ if pred == p {
+ b.Preds[i] = q
+ }
+ }
+}
+
+// replaceSucc replaces all occurrences of p in b's successor list with q.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
+ for i, succ := range b.Succs {
+ if succ == p {
+ b.Succs[i] = q
+ }
+ }
+}
+
+// removePred removes all occurrences of p in b's
+// predecessor list and φ-nodes.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) removePred(p *BasicBlock) {
+ phis := b.phis()
+
+ // We must preserve edge order for φ-nodes.
+ j := 0
+ for i, pred := range b.Preds {
+ if pred != p {
+ b.Preds[j] = b.Preds[i]
+ // Strike out φ-edge too.
+ for _, instr := range phis {
+ phi := instr.(*Phi)
+ phi.Edges[j] = phi.Edges[i]
+ }
+ j++
+ }
+ }
+ // Nil out b.Preds[j:] and φ-edges[j:] to aid GC.
+ for i := j; i < len(b.Preds); i++ {
+ b.Preds[i] = nil
+ for _, instr := range phis {
+ instr.(*Phi).Edges[i] = nil
+ }
+ }
+ b.Preds = b.Preds[:j]
+ for _, instr := range phis {
+ phi := instr.(*Phi)
+ phi.Edges = phi.Edges[:j]
+ }
+}
+
+// Destinations associated with unlabelled for/switch/select stmts.
+// We push/pop one of these as we enter/leave each construct and for
+// each BranchStmt we scan for the innermost target of the right type.
+//
+type targets struct {
+ tail *targets // rest of stack
+ _break *BasicBlock
+ _continue *BasicBlock
+ _fallthrough *BasicBlock
+}
+
+// Destinations associated with a labelled block.
+// We populate these as labels are encountered in forward gotos or
+// labelled statements.
+//
+type lblock struct {
+ _goto *BasicBlock
+ _break *BasicBlock
+ _continue *BasicBlock
+}
+
+// labelledBlock returns the branch target associated with the
+// specified label, creating it if needed.
+//
+func (f *Function) labelledBlock(label *ast.Ident) *lblock {
+ lb := f.lblocks[label.Obj]
+ if lb == nil {
+ lb = &lblock{_goto: f.newBasicBlock(label.Name)}
+ if f.lblocks == nil {
+ f.lblocks = make(map[*ast.Object]*lblock)
+ }
+ f.lblocks[label.Obj] = lb
+ }
+ return lb
+}
+
+// addParam adds a (non-escaping) parameter to f.Params of the
+// specified name, type and source position.
+//
+func (f *Function) addParam(name string, typ types.Type, pos token.Pos) *Parameter {
+ v := &Parameter{
+ name: name,
+ typ: typ,
+ pos: pos,
+ parent: f,
+ }
+ f.Params = append(f.Params, v)
+ return v
+}
+
+func (f *Function) addParamObj(obj types.Object) *Parameter {
+ name := obj.Name()
+ if name == "" {
+ name = fmt.Sprintf("arg%d", len(f.Params))
+ }
+ param := f.addParam(name, obj.Type(), obj.Pos())
+ param.object = obj
+ return param
+}
+
+// addSpilledParam declares a parameter that is pre-spilled to the
+// stack; the function body will load/store the spilled location.
+// Subsequent lifting will eliminate spills where possible.
+//
+func (f *Function) addSpilledParam(obj types.Object) {
+ param := f.addParamObj(obj)
+ spill := &Alloc{Comment: obj.Name()}
+ spill.setType(types.NewPointer(obj.Type()))
+ spill.setPos(obj.Pos())
+ f.objects[obj] = spill
+ f.Locals = append(f.Locals, spill)
+ f.emit(spill)
+ f.emit(&Store{Addr: spill, Val: param})
+}
+
+// startBody initializes the function prior to generating SSA code for its body.
+// Precondition: f.Type() already set.
+//
+func (f *Function) startBody() {
+ f.currentBlock = f.newBasicBlock("entry")
+ f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init
+}
+
+// createSyntacticParams populates f.Params and generates code (spills
+// and named result locals) for all the parameters declared in the
+// syntax. In addition it populates the f.objects mapping.
+//
+// Preconditions:
+// f.startBody() was called.
+// Postcondition:
+// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0)
+//
+func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) {
+ // Receiver (at most one inner iteration).
+ if recv != nil {
+ for _, field := range recv.List {
+ for _, n := range field.Names {
+ f.addSpilledParam(f.Pkg.info.Defs[n])
+ }
+ // Anonymous receiver? No need to spill.
+ if field.Names == nil {
+ f.addParamObj(f.Signature.Recv())
+ }
+ }
+ }
+
+ // Parameters.
+ if functype.Params != nil {
+ n := len(f.Params) // 1 if has recv, 0 otherwise
+ for _, field := range functype.Params.List {
+ for _, n := range field.Names {
+ f.addSpilledParam(f.Pkg.info.Defs[n])
+ }
+ // Anonymous parameter? No need to spill.
+ if field.Names == nil {
+ f.addParamObj(f.Signature.Params().At(len(f.Params) - n))
+ }
+ }
+ }
+
+ // Named results.
+ if functype.Results != nil {
+ for _, field := range functype.Results.List {
+ // Implicit "var" decl of locals for named results.
+ for _, n := range field.Names {
+ f.namedResults = append(f.namedResults, f.addLocalForIdent(n))
+ }
+ }
+ }
+}
+
+// numberRegisters assigns numbers to all SSA registers
+// (value-defining Instructions) in f, to aid debugging.
+// (Non-Instruction Values are named at construction.)
+//
+func numberRegisters(f *Function) {
+ v := 0
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ switch instr.(type) {
+ case Value:
+ instr.(interface {
+ setNum(int)
+ }).setNum(v)
+ v++
+ }
+ }
+ }
+}
+
+// buildReferrers populates the def/use information in all non-nil
+// Value.Referrers slices.
+// Precondition: all such slices are initially empty.
+func buildReferrers(f *Function) {
+ var rands []*Value
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ rands = instr.Operands(rands[:0]) // recycle storage
+ for _, rand := range rands {
+ if r := *rand; r != nil {
+ if ref := r.Referrers(); ref != nil {
+ *ref = append(*ref, instr)
+ }
+ }
+ }
+ }
+ }
+}
+
+// finishBody() finalizes the function after SSA code generation of its body.
+func (f *Function) finishBody() {
+ f.objects = nil
+ f.currentBlock = nil
+ f.lblocks = nil
+
+ // Don't pin the AST in memory (except in debug mode).
+ if n := f.syntax; n != nil && !f.debugInfo() {
+ f.syntax = extentNode{n.Pos(), n.End()}
+ }
+
+ // Remove from f.Locals any Allocs that escape to the heap.
+ j := 0
+ for _, l := range f.Locals {
+ if !l.Heap {
+ f.Locals[j] = l
+ j++
+ }
+ }
+ // Nil out f.Locals[j:] to aid GC.
+ for i := j; i < len(f.Locals); i++ {
+ f.Locals[i] = nil
+ }
+ f.Locals = f.Locals[:j]
+
+ optimizeBlocks(f)
+
+ buildReferrers(f)
+
+ buildDomTree(f)
+
+ if f.Prog.mode&NaiveForm == 0 {
+ // For debugging pre-state of lifting pass:
+ // numberRegisters(f)
+ // f.WriteTo(os.Stderr)
+ lift(f)
+ }
+
+ f.namedResults = nil // (used by lifting)
+
+ numberRegisters(f)
+
+ if f.Prog.mode&PrintFunctions != 0 {
+ printMu.Lock()
+ f.WriteTo(os.Stdout)
+ printMu.Unlock()
+ }
+
+ if f.Prog.mode&SanityCheckFunctions != 0 {
+ mustSanityCheck(f, nil)
+ }
+}
+
+// removeNilBlocks eliminates nils from f.Blocks and updates each
+// BasicBlock.Index. Use this after any pass that may delete blocks.
+//
+func (f *Function) removeNilBlocks() {
+ j := 0
+ for _, b := range f.Blocks {
+ if b != nil {
+ b.Index = j
+ f.Blocks[j] = b
+ j++
+ }
+ }
+ // Nil out f.Blocks[j:] to aid GC.
+ for i := j; i < len(f.Blocks); i++ {
+ f.Blocks[i] = nil
+ }
+ f.Blocks = f.Blocks[:j]
+}
+
+// SetDebugMode sets the debug mode for package pkg. If true, all its
+// functions will include full debug info. This greatly increases the
+// size of the instruction stream, and causes Functions to depend upon
+// the ASTs, potentially keeping them live in memory for longer.
+//
+func (pkg *Package) SetDebugMode(debug bool) {
+ // TODO(adonovan): do we want ast.File granularity?
+ pkg.debug = debug
+}
+
+// debugInfo reports whether debug info is wanted for this function.
+func (f *Function) debugInfo() bool {
+ return f.Pkg != nil && f.Pkg.debug
+}
+
+// addNamedLocal creates a local variable, adds it to function f and
+// returns it. Its name and type are taken from obj. Subsequent
+// calls to f.lookup(obj) will return the same local.
+//
+func (f *Function) addNamedLocal(obj types.Object) *Alloc {
+ l := f.addLocal(obj.Type(), obj.Pos())
+ l.Comment = obj.Name()
+ f.objects[obj] = l
+ return l
+}
+
+func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc {
+ return f.addNamedLocal(f.Pkg.info.Defs[id])
+}
+
+// addLocal creates an anonymous local variable of type typ, adds it
+// to function f and returns it. pos is the optional source location.
+//
+func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc {
+ v := &Alloc{}
+ v.setType(types.NewPointer(typ))
+ v.setPos(pos)
+ f.Locals = append(f.Locals, v)
+ f.emit(v)
+ return v
+}
+
+// lookup returns the address of the named variable identified by obj
+// that is local to function f or one of its enclosing functions.
+// If escaping, the reference comes from a potentially escaping pointer
+// expression and the referent must be heap-allocated.
+//
+func (f *Function) lookup(obj types.Object, escaping bool) Value {
+ if v, ok := f.objects[obj]; ok {
+ if alloc, ok := v.(*Alloc); ok && escaping {
+ alloc.Heap = true
+ }
+ return v // function-local var (address)
+ }
+
+ // Definition must be in an enclosing function;
+ // plumb it through intervening closures.
+ if f.parent == nil {
+ panic("no ssa.Value for " + obj.String())
+ }
+ outer := f.parent.lookup(obj, true) // escaping
+ v := &FreeVar{
+ name: obj.Name(),
+ typ: outer.Type(),
+ pos: outer.Pos(),
+ outer: outer,
+ parent: f,
+ }
+ f.objects[obj] = v
+ f.FreeVars = append(f.FreeVars, v)
+ return v
+}
+
+// emit emits the specified instruction to function f.
+func (f *Function) emit(instr Instruction) Value {
+ return f.currentBlock.emit(instr)
+}
+
+// RelString returns the full name of this function, qualified by
+// package name, receiver type, etc.
+//
+// The specific formatting rules are not guaranteed and may change.
+//
+// Examples:
+// "math.IsNaN" // a package-level function
+// "(*bytes.Buffer).Bytes" // a declared method or a wrapper
+// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0)
+// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure)
+// "main.main$1" // an anonymous function in main
+// "main.init#1" // a declared init function
+// "main.init" // the synthesized package initializer
+//
+// When these functions are referred to from within the same package
+// (i.e. from == f.Pkg.Pkg), they are rendered without the package path.
+// For example: "IsNaN", "(*Buffer).Bytes", etc.
+//
+// All non-synthetic functions have distinct package-qualified names.
+// (But two methods may have the same name "(T).f" if one is a synthetic
+// wrapper promoting a non-exported method "f" from another package; in
+// that case, the strings are equal but the identifiers "f" are distinct.)
+//
+func (f *Function) RelString(from *types.Package) string {
+ // Anonymous?
+ if f.parent != nil {
+ // An anonymous function's Name() looks like "parentName$1",
+ // but its String() should include the type/package/etc.
+ parent := f.parent.RelString(from)
+ for i, anon := range f.parent.AnonFuncs {
+ if anon == f {
+ return fmt.Sprintf("%s$%d", parent, 1+i)
+ }
+ }
+
+ return f.name // should never happen
+ }
+
+ // Method (declared or wrapper)?
+ if recv := f.Signature.Recv(); recv != nil {
+ return f.relMethod(from, recv.Type())
+ }
+
+ // Thunk?
+ if f.method != nil {
+ return f.relMethod(from, f.method.Recv())
+ }
+
+ // Bound?
+ if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") {
+ return f.relMethod(from, f.FreeVars[0].Type())
+ }
+
+ // Package-level function?
+ // Prefix with package name for cross-package references only.
+ if p := f.pkg(); p != nil && p != from {
+ return fmt.Sprintf("%s.%s", p.Path(), f.name)
+ }
+
+ // Unknown.
+ return f.name
+}
+
+func (f *Function) relMethod(from *types.Package, recv types.Type) string {
+ return fmt.Sprintf("(%s).%s", relType(recv, from), f.name)
+}
+
+// writeSignature writes to buf the signature sig in declaration syntax.
+func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature, params []*Parameter) {
+ buf.WriteString("func ")
+ if recv := sig.Recv(); recv != nil {
+ buf.WriteString("(")
+ if n := params[0].Name(); n != "" {
+ buf.WriteString(n)
+ buf.WriteString(" ")
+ }
+ types.WriteType(buf, params[0].Type(), types.RelativeTo(from))
+ buf.WriteString(") ")
+ }
+ buf.WriteString(name)
+ types.WriteSignature(buf, sig, types.RelativeTo(from))
+}
+
+func (f *Function) pkg() *types.Package {
+ if f.Pkg != nil {
+ return f.Pkg.Pkg
+ }
+ return nil
+}
+
+var _ io.WriterTo = (*Function)(nil) // *Function implements io.WriterTo
+
+func (f *Function) WriteTo(w io.Writer) (int64, error) {
+ var buf bytes.Buffer
+ WriteFunction(&buf, f)
+ n, err := w.Write(buf.Bytes())
+ return int64(n), err
+}
+
+// WriteFunction writes to buf a human-readable "disassembly" of f.
+func WriteFunction(buf *bytes.Buffer, f *Function) {
+ fmt.Fprintf(buf, "# Name: %s\n", f.String())
+ if f.Pkg != nil {
+ fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path())
+ }
+ if syn := f.Synthetic; syn != "" {
+ fmt.Fprintln(buf, "# Synthetic:", syn)
+ }
+ if pos := f.Pos(); pos.IsValid() {
+ fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos))
+ }
+
+ if f.parent != nil {
+ fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name())
+ }
+
+ if f.Recover != nil {
+ fmt.Fprintf(buf, "# Recover: %s\n", f.Recover)
+ }
+
+ from := f.pkg()
+
+ if f.FreeVars != nil {
+ buf.WriteString("# Free variables:\n")
+ for i, fv := range f.FreeVars {
+ fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from))
+ }
+ }
+
+ if len(f.Locals) > 0 {
+ buf.WriteString("# Locals:\n")
+ for i, l := range f.Locals {
+ fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from))
+ }
+ }
+ writeSignature(buf, from, f.Name(), f.Signature, f.Params)
+ buf.WriteString(":\n")
+
+ if f.Blocks == nil {
+ buf.WriteString("\t(external)\n")
+ }
+
+ // NB. column calculations are confused by non-ASCII
+ // characters and assume 8-space tabs.
+ const punchcard = 80 // for old time's sake.
+ const tabwidth = 8
+ for _, b := range f.Blocks {
+ if b == nil {
+ // Corrupt CFG.
+ fmt.Fprintf(buf, ".nil:\n")
+ continue
+ }
+ n, _ := fmt.Fprintf(buf, "%d:", b.Index)
+ bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs))
+ fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg)
+
+ if false { // CFG debugging
+ fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs)
+ }
+ for _, instr := range b.Instrs {
+ buf.WriteString("\t")
+ switch v := instr.(type) {
+ case Value:
+ l := punchcard - tabwidth
+ // Left-align the instruction.
+ if name := v.Name(); name != "" {
+ n, _ := fmt.Fprintf(buf, "%s = ", name)
+ l -= n
+ }
+ n, _ := buf.WriteString(instr.String())
+ l -= n
+ // Right-align the type if there's space.
+ if t := v.Type(); t != nil {
+ buf.WriteByte(' ')
+ ts := relType(t, from)
+ l -= len(ts) + len(" ") // (spaces before and after type)
+ if l > 0 {
+ fmt.Fprintf(buf, "%*s", l, "")
+ }
+ buf.WriteString(ts)
+ }
+ case nil:
+ // Be robust against bad transforms.
+ buf.WriteString("<deleted>")
+ default:
+ buf.WriteString(instr.String())
+ }
+ buf.WriteString("\n")
+ }
+ }
+ fmt.Fprintf(buf, "\n")
+}
+
+// newBasicBlock adds to f a new basic block and returns it. It does
+// not automatically become the current block for subsequent calls to emit.
+// comment is an optional string for more readable debugging output.
+//
+func (f *Function) newBasicBlock(comment string) *BasicBlock {
+ b := &BasicBlock{
+ Index: len(f.Blocks),
+ Comment: comment,
+ parent: f,
+ }
+ b.Succs = b.succs2[:0]
+ f.Blocks = append(f.Blocks, b)
+ return b
+}
+
+// NewFunction returns a new synthetic Function instance belonging to
+// prog, with its name and signature fields set as specified.
+//
+// The caller is responsible for initializing the remaining fields of
+// the function object, e.g. Pkg, Params, Blocks.
+//
+// It is practically impossible for clients to construct well-formed
+// SSA functions/packages/programs directly, so we assume this is the
+// job of the Builder alone. NewFunction exists to provide clients a
+// little flexibility. For example, analysis tools may wish to
+// construct fake Functions for the root of the callgraph, a fake
+// "reflect" package, etc.
+//
+// TODO(adonovan): think harder about the API here.
+//
+func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function {
+ return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance}
+}
+
+type extentNode [2]token.Pos
+
+func (n extentNode) Pos() token.Pos { return n[0] }
+func (n extentNode) End() token.Pos { return n[1] }
+
+// Syntax returns an ast.Node whose Pos/End methods provide the
+// lexical extent of the function if it was defined by Go source code
+// (f.Synthetic==""), or nil otherwise.
+//
+// If f was built with debug information (see Package.SetDebugMode),
+// the result is the *ast.FuncDecl or *ast.FuncLit that declared the
+// function. Otherwise, it is an opaque Node providing only position
+// information; this avoids pinning the AST in memory.
+//
+func (f *Function) Syntax() ast.Node { return f.syntax }
diff --git a/vendor/golang.org/x/tools/go/ssa/identical.go b/vendor/golang.org/x/tools/go/ssa/identical.go
new file mode 100644
index 0000000..53cbee1
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/identical.go
@@ -0,0 +1,7 @@
+// +build go1.8
+
+package ssa
+
+import "go/types"
+
+var structTypesIdentical = types.IdenticalIgnoreTags
diff --git a/vendor/golang.org/x/tools/go/ssa/identical_17.go b/vendor/golang.org/x/tools/go/ssa/identical_17.go
new file mode 100644
index 0000000..da89d33
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/identical_17.go
@@ -0,0 +1,7 @@
+// +build !go1.8
+
+package ssa
+
+import "go/types"
+
+var structTypesIdentical = types.Identical
diff --git a/vendor/golang.org/x/tools/go/ssa/identical_test.go b/vendor/golang.org/x/tools/go/ssa/identical_test.go
new file mode 100644
index 0000000..404693d
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/identical_test.go
@@ -0,0 +1,9 @@
+// +build go1.8
+
+package ssa_test
+
+import "testing"
+
+func TestValueForExprStructConv(t *testing.T) {
+ testValueForExpr(t, "testdata/structconv.go")
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/lift.go b/vendor/golang.org/x/tools/go/ssa/lift.go
new file mode 100644
index 0000000..048e9b0
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/lift.go
@@ -0,0 +1,653 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines the lifting pass which tries to "lift" Alloc
+// cells (new/local variables) into SSA registers, replacing loads
+// with the dominating stored value, eliminating loads and stores, and
+// inserting φ-nodes as needed.
+
+// Cited papers and resources:
+//
+// Ron Cytron et al. 1991. Efficiently computing SSA form...
+// http://doi.acm.org/10.1145/115372.115320
+//
+// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm.
+// Software Practice and Experience 2001, 4:1-10.
+// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf
+//
+// Daniel Berlin, llvmdev mailing list, 2012.
+// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html
+// (Be sure to expand the whole thread.)
+
+// TODO(adonovan): opt: there are many optimizations worth evaluating, and
+// the conventional wisdom for SSA construction is that a simple
+// algorithm well engineered often beats those of better asymptotic
+// complexity on all but the most egregious inputs.
+//
+// Danny Berlin suggests that the Cooper et al. algorithm for
+// computing the dominance frontier is superior to Cytron et al.
+// Furthermore he recommends that rather than computing the DF for the
+// whole function then renaming all alloc cells, it may be cheaper to
+// compute the DF for each alloc cell separately and throw it away.
+//
+// Consider exploiting liveness information to avoid creating dead
+// φ-nodes which we then immediately remove.
+//
+// Also see many other "TODO: opt" suggestions in the code.
+
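+// As a rough illustration of the pass's effect, a body that accesses a
+// local x only through loads and stores, e.g. (disassembly sketch; types
+// and positions omitted):
+//
+//	t0 = local int (x)
+//	*t0 = 1:int
+//	t1 = *t0
+//	t2 = t1 + 2:int
+//
+// has its load replaced by the dominating stored value, and the Alloc,
+// Store and load are deleted, leaving (after renumbering) essentially:
+//
+//	t0 = 1:int + 2:int
+//
+// No constant folding is performed; only the memory operations go away.
+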
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "math/big"
+ "os"
+)
+
+// If true, show diagnostic information at each step of lifting.
+// Very verbose.
+const debugLifting = false
+
+// domFrontier maps each block to the set of blocks in its dominance
+// frontier. The outer slice is conceptually a map keyed by
+// Block.Index. The inner slice is conceptually a set, possibly
+// containing duplicates.
+//
+// TODO(adonovan): opt: measure impact of dups; consider a packed bit
+// representation, e.g. big.Int, and bitwise parallel operations for
+// the union step in the Children loop.
+//
+// domFrontier's methods mutate the slice's elements but not its
+// length, so their receivers needn't be pointers.
+//
+type domFrontier [][]*BasicBlock
+
+func (df domFrontier) add(u, v *BasicBlock) {
+ p := &df[u.Index]
+ *p = append(*p, v)
+}
+
+// build builds the dominance frontier df for the dominator (sub)tree
+// rooted at u, using the Cytron et al. algorithm.
+//
+// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA
+// by pruning the entire IDF computation, rather than merely pruning
+// the DF -> IDF step.
+func (df domFrontier) build(u *BasicBlock) {
+ // Encounter each node u in postorder of dom tree.
+ for _, child := range u.dom.children {
+ df.build(child)
+ }
+ for _, vb := range u.Succs {
+ if v := vb.dom; v.idom != u {
+ df.add(u, vb)
+ }
+ }
+ for _, w := range u.dom.children {
+ for _, vb := range df[w.Index] {
+ // TODO(adonovan): opt: use word-parallel bitwise union.
+ if v := vb.dom; v.idom != u {
+ df.add(u, vb)
+ }
+ }
+ }
+}
+
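+// As a concrete example, in the diamond CFG
+//
+//	entry -> then -> done
+//	entry -> else -> done
+//
+// neither then nor else dominates done, so done lies in the dominance
+// frontier of both, and a variable stored in then or else receives a
+// φ-node in done; entry strictly dominates every other block, so its
+// frontier is empty.
+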
+func buildDomFrontier(fn *Function) domFrontier {
+ df := make(domFrontier, len(fn.Blocks))
+ df.build(fn.Blocks[0])
+ if fn.Recover != nil {
+ df.build(fn.Recover)
+ }
+ return df
+}
+
+func removeInstr(refs []Instruction, instr Instruction) []Instruction {
+ i := 0
+ for _, ref := range refs {
+ if ref == instr {
+ continue
+ }
+ refs[i] = ref
+ i++
+ }
+ for j := i; j != len(refs); j++ {
+ refs[j] = nil // aid GC
+ }
+ return refs[:i]
+}
+
+// lift lifts into SSA registers those local and new Allocs that are
+// accessed only by loads and stores, inserting φ-nodes where necessary.
+// The result is a program in classical pruned SSA form.
+//
+// Preconditions:
+// - fn has no dead blocks (blockopt has run).
+// - Def/use info (Operands and Referrers) is up-to-date.
+// - The dominator tree is up-to-date.
+//
+func lift(fn *Function) {
+ // TODO(adonovan): opt: lots of little optimizations may be
+ // worthwhile here, especially if they cause us to avoid
+ // buildDomFrontier. For example:
+ //
+ // - Alloc never loaded? Eliminate.
+ // - Alloc never stored? Replace all loads with a zero constant.
+ // - Alloc stored once? Replace loads with dominating store;
+ // don't forget that an Alloc is itself an effective store
+ // of zero.
+ // - Alloc used only within a single block?
+ // Use degenerate algorithm avoiding φ-nodes.
+ // - Consider synergy with scalar replacement of aggregates (SRA).
+ // e.g. *(&x.f) where x is an Alloc.
+ // Perhaps we'd get better results if we generated this as x.f
+ // i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)).
+ // Unclear.
+ //
+ // But we will start with the simplest correct code.
+ df := buildDomFrontier(fn)
+
+ if debugLifting {
+ title := false
+ for i, blocks := range df {
+ if blocks != nil {
+ if !title {
+ fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn)
+ title = true
+ }
+ fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks)
+ }
+ }
+ }
+
+ newPhis := make(newPhiMap)
+
+ // During this pass we will replace some BasicBlock.Instrs
+ // (allocs, loads and stores) with nil, keeping a count in
+ // BasicBlock.gaps. At the end we will reset Instrs to the
+ // concatenation of all non-dead newPhis and non-nil Instrs
+ // for the block, reusing the original array if space permits.
+
+ // While we're here, we also eliminate 'rundefers'
+ // instructions in functions that contain no 'defer'
+ // instructions.
+ usesDefer := false
+
+ // A counter used to generate ~unique ids for Phi nodes, as an
+ // aid to debugging. We use large numbers to make them highly
+ // visible. All nodes are renumbered later.
+ fresh := 1000
+
+ // Determine which allocs we can lift and number them densely.
+ // The renaming phase uses this numbering for compact maps.
+ numAllocs := 0
+ for _, b := range fn.Blocks {
+ b.gaps = 0
+ b.rundefers = 0
+ for _, instr := range b.Instrs {
+ switch instr := instr.(type) {
+ case *Alloc:
+ index := -1
+ if liftAlloc(df, instr, newPhis, &fresh) {
+ index = numAllocs
+ numAllocs++
+ }
+ instr.index = index
+ case *Defer:
+ usesDefer = true
+ case *RunDefers:
+ b.rundefers++
+ }
+ }
+ }
+
+ // renaming maps an alloc (keyed by index) to its replacement
+ // value. Initially the renaming contains nil, signifying the
+ // zero constant of the appropriate type; we construct the
+ // Const lazily at most once on each path through the domtree.
+ // TODO(adonovan): opt: cache per-function not per subtree.
+ renaming := make([]Value, numAllocs)
+
+ // Renaming.
+ rename(fn.Blocks[0], renaming, newPhis)
+
+ // Eliminate dead φ-nodes.
+ removeDeadPhis(fn.Blocks, newPhis)
+
+ // Prepend remaining live φ-nodes to each block.
+ for _, b := range fn.Blocks {
+ nps := newPhis[b]
+ j := len(nps)
+
+ rundefersToKill := b.rundefers
+ if usesDefer {
+ rundefersToKill = 0
+ }
+
+ if j+b.gaps+rundefersToKill == 0 {
+ continue // fast path: no new phis or gaps
+ }
+
+ // Compact nps + non-nil Instrs into a new slice.
+ // TODO(adonovan): opt: compact in situ (rightwards)
+ // if Instrs has sufficient space or slack.
+ dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill)
+ for i, np := range nps {
+ dst[i] = np.phi
+ }
+ for _, instr := range b.Instrs {
+ if instr == nil {
+ continue
+ }
+ if !usesDefer {
+ if _, ok := instr.(*RunDefers); ok {
+ continue
+ }
+ }
+ dst[j] = instr
+ j++
+ }
+ b.Instrs = dst
+ }
+
+ // Remove any fn.Locals that were lifted.
+ j := 0
+ for _, l := range fn.Locals {
+ if l.index < 0 {
+ fn.Locals[j] = l
+ j++
+ }
+ }
+ // Nil out fn.Locals[j:] to aid GC.
+ for i := j; i < len(fn.Locals); i++ {
+ fn.Locals[i] = nil
+ }
+ fn.Locals = fn.Locals[:j]
+}
+
+// removeDeadPhis removes φ-nodes not transitively needed by a
+// non-Phi, non-DebugRef instruction.
+func removeDeadPhis(blocks []*BasicBlock, newPhis newPhiMap) {
+ // First pass: find the set of "live" φ-nodes: those reachable
+ // from some non-Phi instruction.
+ //
+ // We compute reachability in reverse, starting from each φ,
+ // rather than forwards, starting from each live non-Phi
+ // instruction, because this way visits much less of the
+ // Value graph.
+ livePhis := make(map[*Phi]bool)
+ for _, npList := range newPhis {
+ for _, np := range npList {
+ phi := np.phi
+ if !livePhis[phi] && phiHasDirectReferrer(phi) {
+ markLivePhi(livePhis, phi)
+ }
+ }
+ }
+
+ // Existing φ-nodes due to && and || operators
+ // are all considered live (see Go issue 19622).
+ for _, b := range blocks {
+ for _, phi := range b.phis() {
+ markLivePhi(livePhis, phi.(*Phi))
+ }
+ }
+
+ // Second pass: eliminate unused phis from newPhis.
+ for block, npList := range newPhis {
+ j := 0
+ for _, np := range npList {
+ if livePhis[np.phi] {
+ npList[j] = np
+ j++
+ } else {
+ // discard it, first removing it from referrers
+ for _, val := range np.phi.Edges {
+ if refs := val.Referrers(); refs != nil {
+ *refs = removeInstr(*refs, np.phi)
+ }
+ }
+ np.phi.block = nil
+ }
+ }
+ newPhis[block] = npList[:j]
+ }
+}
+
+// markLivePhi marks phi, and all φ-nodes transitively reachable via
+// its Operands, live.
+func markLivePhi(livePhis map[*Phi]bool, phi *Phi) {
+ livePhis[phi] = true
+ for _, rand := range phi.Operands(nil) {
+ if q, ok := (*rand).(*Phi); ok {
+ if !livePhis[q] {
+ markLivePhi(livePhis, q)
+ }
+ }
+ }
+}
+
+// phiHasDirectReferrer reports whether phi is directly referred to by
+// a non-Phi instruction. Such instructions are the
+// roots of the liveness traversal.
+func phiHasDirectReferrer(phi *Phi) bool {
+ for _, instr := range *phi.Referrers() {
+ if _, ok := instr.(*Phi); !ok {
+ return true
+ }
+ }
+ return false
+}
+
+type blockSet struct{ big.Int } // (inherit methods from Int)
+
+// add adds b to the set and returns true if the set changed.
+func (s *blockSet) add(b *BasicBlock) bool {
+ i := b.Index
+ if s.Bit(i) != 0 {
+ return false
+ }
+ s.SetBit(&s.Int, i, 1)
+ return true
+}
+
+// take removes an arbitrary element from a set s and
+// returns its index, or returns -1 if empty.
+func (s *blockSet) take() int {
+ l := s.BitLen()
+ for i := 0; i < l; i++ {
+ if s.Bit(i) == 1 {
+ s.SetBit(&s.Int, i, 0)
+ return i
+ }
+ }
+ return -1
+}
+
+// newPhi is a pair of a newly introduced φ-node and the lifted Alloc
+// it replaces.
+type newPhi struct {
+ phi *Phi
+ alloc *Alloc
+}
+
+// newPhiMap records, for each basic block, the set of newPhis that
+// must be prepended to the block.
+type newPhiMap map[*BasicBlock][]newPhi
+
+// liftAlloc determines whether alloc can be lifted into registers,
+// and if so, it populates newPhis with all the φ-nodes it may require
+// and returns true.
+//
+// fresh is a source of fresh ids for phi nodes.
+//
+func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool {
+ // Don't lift aggregates into registers, because we don't have
+ // a way to express their zero-constants.
+ switch deref(alloc.Type()).Underlying().(type) {
+ case *types.Array, *types.Struct:
+ return false
+ }
+
+ // Don't lift named return values in functions that defer
+ // calls that may recover from panic.
+ if fn := alloc.Parent(); fn.Recover != nil {
+ for _, nr := range fn.namedResults {
+ if nr == alloc {
+ return false
+ }
+ }
+ }
+
+ // Compute defblocks, the set of blocks containing a
+ // definition of the alloc cell.
+ var defblocks blockSet
+ for _, instr := range *alloc.Referrers() {
+ // Bail out if we discover the alloc is not liftable;
+ // the only operations permitted to use the alloc are
+ // loads/stores into the cell, and DebugRef.
+ switch instr := instr.(type) {
+ case *Store:
+ if instr.Val == alloc {
+ return false // address used as value
+ }
+ if instr.Addr != alloc {
+ panic("Alloc.Referrers is inconsistent")
+ }
+ defblocks.add(instr.Block())
+ case *UnOp:
+ if instr.Op != token.MUL {
+ return false // not a load
+ }
+ if instr.X != alloc {
+ panic("Alloc.Referrers is inconsistent")
+ }
+ case *DebugRef:
+ // ok
+ default:
+ return false // some other instruction
+ }
+ }
+ // The Alloc itself counts as a (zero) definition of the cell.
+ defblocks.add(alloc.Block())
+
+ if debugLifting {
+ fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name())
+ }
+
+ fn := alloc.Parent()
+
+ // Φ-insertion.
+ //
+ // What follows is the body of the main loop of the insert-φ
+ // function described by Cytron et al, but instead of using
+ // counter tricks, we just reset the 'hasAlready' and 'work'
+ // sets each iteration. These are bitmaps so it's pretty cheap.
+ //
+ // TODO(adonovan): opt: recycle slice storage for W,
+ // hasAlready, defBlocks across liftAlloc calls.
+ var hasAlready blockSet
+
+ // Initialize W and work to defblocks.
+ var work blockSet = defblocks // blocks seen
+ var W blockSet // blocks to do
+ W.Set(&defblocks.Int)
+
+ // Traverse iterated dominance frontier, inserting φ-nodes.
+ for i := W.take(); i != -1; i = W.take() {
+ u := fn.Blocks[i]
+ for _, v := range df[u.Index] {
+ if hasAlready.add(v) {
+ // Create φ-node.
+ // It will be prepended to v.Instrs later, if needed.
+ phi := &Phi{
+ Edges: make([]Value, len(v.Preds)),
+ Comment: alloc.Comment,
+ }
+ // This is merely a debugging aid:
+ phi.setNum(*fresh)
+ *fresh++
+
+ phi.pos = alloc.Pos()
+ phi.setType(deref(alloc.Type()))
+ phi.block = v
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v)
+ }
+ newPhis[v] = append(newPhis[v], newPhi{phi, alloc})
+
+ if work.add(v) {
+ W.add(v)
+ }
+ }
+ }
+ }
+
+ return true
+}
+
+// replaceAll replaces all intraprocedural uses of x with y,
+// updating x.Referrers and y.Referrers.
+// Precondition: x.Referrers() != nil, i.e. x must be local to some function.
+//
+func replaceAll(x, y Value) {
+ var rands []*Value
+ pxrefs := x.Referrers()
+ pyrefs := y.Referrers()
+ for _, instr := range *pxrefs {
+ rands = instr.Operands(rands[:0]) // recycle storage
+ for _, rand := range rands {
+ if *rand != nil {
+ if *rand == x {
+ *rand = y
+ }
+ }
+ }
+ if pyrefs != nil {
+ *pyrefs = append(*pyrefs, instr) // dups ok
+ }
+ }
+ *pxrefs = nil // x is now unreferenced
+}
+
+// renamed returns the value to which alloc is being renamed,
+// constructing it lazily if it's the implicit zero initialization.
+//
+func renamed(renaming []Value, alloc *Alloc) Value {
+ v := renaming[alloc.index]
+ if v == nil {
+ v = zeroConst(deref(alloc.Type()))
+ renaming[alloc.index] = v
+ }
+ return v
+}
+
+// rename implements the (Cytron et al) SSA renaming algorithm, a
+// preorder traversal of the dominator tree replacing all loads of
+// Alloc cells with the value stored to that cell by the dominating
+// store instruction. For lifting, we need only consider loads,
+// stores and φ-nodes.
+//
+// renaming is a map from *Alloc (keyed by index number) to its
+// dominating stored value; newPhis[x] is the set of new φ-nodes to be
+// prepended to block x.
+//
+func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) {
+ // Each φ-node becomes the new name for its associated Alloc.
+ for _, np := range newPhis[u] {
+ phi := np.phi
+ alloc := np.alloc
+ renaming[alloc.index] = phi
+ }
+
+ // Rename loads and stores of allocs.
+ for i, instr := range u.Instrs {
+ switch instr := instr.(type) {
+ case *Alloc:
+ if instr.index >= 0 { // store of zero to Alloc cell
+ // Replace dominated loads by the zero value.
+ renaming[instr.index] = nil
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr)
+ }
+ // Delete the Alloc.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+
+ case *Store:
+ if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell
+ // Replace dominated loads by the stored value.
+ renaming[alloc.index] = instr.Val
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n",
+ instr, instr.Val.Name())
+ }
+ // Remove the store from the referrer list of the stored value.
+ if refs := instr.Val.Referrers(); refs != nil {
+ *refs = removeInstr(*refs, instr)
+ }
+ // Delete the Store.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+
+ case *UnOp:
+ if instr.Op == token.MUL {
+ if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell
+ newval := renamed(renaming, alloc)
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n",
+ instr.Name(), instr, newval.Name())
+ }
+ // Replace all references to
+ // the loaded value by the
+ // dominating stored value.
+ replaceAll(instr, newval)
+ // Delete the Load.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+ }
+
+ case *DebugRef:
+ if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell
+ if instr.IsAddr {
+ instr.X = renamed(renaming, alloc)
+ instr.IsAddr = false
+
+ // Add DebugRef to instr.X's referrers.
+ if refs := instr.X.Referrers(); refs != nil {
+ *refs = append(*refs, instr)
+ }
+ } else {
+ // A source expression denotes the address
+ // of an Alloc that was optimized away.
+ instr.X = nil
+
+ // Delete the DebugRef.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+ }
+ }
+ }
+
+ // For each φ-node in a CFG successor, rename the edge.
+ for _, v := range u.Succs {
+ phis := newPhis[v]
+ if len(phis) == 0 {
+ continue
+ }
+ i := v.predIndex(u)
+ for _, np := range phis {
+ phi := np.phi
+ alloc := np.alloc
+ newval := renamed(renaming, alloc)
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n",
+ phi.Name(), u, v, i, alloc.Name(), newval.Name())
+ }
+ phi.Edges[i] = newval
+ if prefs := newval.Referrers(); prefs != nil {
+ *prefs = append(*prefs, phi)
+ }
+ }
+ }
+
+ // Continue depth-first recursion over domtree, pushing a
+ // fresh copy of the renaming map for each subtree.
+ for i, v := range u.dom.children {
+ r := renaming
+ if i < len(u.dom.children)-1 {
+ // On all but the final iteration, we must make
+ // a copy to avoid destructive update.
+ r = make([]Value, len(renaming))
+ copy(r, renaming)
+ }
+ rename(v, r, newPhis)
+ }
+
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/lvalue.go b/vendor/golang.org/x/tools/go/ssa/lvalue.go
new file mode 100644
index 0000000..4d85be3
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/lvalue.go
@@ -0,0 +1,120 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// lvalues are the union of addressable expressions and map-index
+// expressions.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+)
+
+// An lvalue represents an assignable location that may appear on the
+// left-hand side of an assignment. This is a generalization of a
+// pointer to permit updates to elements of maps.
+//
+type lvalue interface {
+ store(fn *Function, v Value) // stores v into the location
+ load(fn *Function) Value // loads the contents of the location
+ address(fn *Function) Value // address of the location
+ typ() types.Type // returns the type of the location
+}
+
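+// For instance, the builder represents an assignment m[k] = v to a map
+// element with an element lvalue, roughly as follows (a sketch; the
+// field values come from whatever the enclosing expression provides):
+//
+//	lv := &element{m: mapVal, k: keyVal, t: elemType, pos: pos}
+//	lv.store(fn, v) // emits a MapUpdate instruction
+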
+// An address is an lvalue represented by a true pointer.
+type address struct {
+ addr Value
+ pos token.Pos // source position
+ expr ast.Expr // source syntax of the value (not address) [debug mode]
+}
+
+func (a *address) load(fn *Function) Value {
+ load := emitLoad(fn, a.addr)
+ load.pos = a.pos
+ return load
+}
+
+func (a *address) store(fn *Function, v Value) {
+ store := emitStore(fn, a.addr, v, a.pos)
+ if a.expr != nil {
+ // store.Val is v, converted for assignability.
+ emitDebugRef(fn, a.expr, store.Val, false)
+ }
+}
+
+func (a *address) address(fn *Function) Value {
+ if a.expr != nil {
+ emitDebugRef(fn, a.expr, a.addr, true)
+ }
+ return a.addr
+}
+
+func (a *address) typ() types.Type {
+ return deref(a.addr.Type())
+}
+
+// An element is an lvalue represented by m[k], the location of an
+// element of a map or string. These locations are not addressable
+// since pointers cannot be formed from them, but they do support
+// load(), and in the case of maps, store().
+//
+type element struct {
+	m, k Value // map (or string) and key (or index)
+ t types.Type // map element type or string byte type
+ pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v)
+}
+
+func (e *element) load(fn *Function) Value {
+ l := &Lookup{
+ X: e.m,
+ Index: e.k,
+ }
+ l.setPos(e.pos)
+ l.setType(e.t)
+ return fn.emit(l)
+}
+
+func (e *element) store(fn *Function, v Value) {
+ up := &MapUpdate{
+ Map: e.m,
+ Key: e.k,
+ Value: emitConv(fn, v, e.t),
+ }
+ up.pos = e.pos
+ fn.emit(up)
+}
+
+func (e *element) address(fn *Function) Value {
+ panic("map/string elements are not addressable")
+}
+
+func (e *element) typ() types.Type {
+ return e.t
+}
+
+// A blank is a dummy variable whose name is "_".
+// It is not reified: loads are illegal and stores are ignored.
+//
+type blank struct{}
+
+func (bl blank) load(fn *Function) Value {
+ panic("blank.load is illegal")
+}
+
+func (bl blank) store(fn *Function, v Value) {
+ // no-op
+}
+
+func (bl blank) address(fn *Function) Value {
+ panic("blank var is not addressable")
+}
+
+func (bl blank) typ() types.Type {
+ // This should be the type of the blank Ident; the typechecker
+ // doesn't provide this yet, but fortunately, we don't need it
+ // yet either.
+ panic("blank.typ is unimplemented")
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/methods.go b/vendor/golang.org/x/tools/go/ssa/methods.go
new file mode 100644
index 0000000..080dca9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/methods.go
@@ -0,0 +1,239 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines utilities for population of method sets.
+
+import (
+ "fmt"
+ "go/types"
+)
+
+// MethodValue returns the Function implementing method sel, building
+// wrapper methods on demand. It returns nil if sel denotes an
+// abstract (interface) method.
+//
+// Precondition: sel.Kind() == MethodVal.
+//
+// Thread-safe.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
+//
+func (prog *Program) MethodValue(sel *types.Selection) *Function {
+ if sel.Kind() != types.MethodVal {
+ panic(fmt.Sprintf("Method(%s) kind != MethodVal", sel))
+ }
+ T := sel.Recv()
+ if isInterface(T) {
+ return nil // abstract method
+ }
+ if prog.mode&LogSource != 0 {
+ defer logStack("Method %s %v", T, sel)()
+ }
+
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+
+ return prog.addMethod(prog.createMethodSet(T), sel)
+}
+
+// LookupMethod returns the implementation of the method of type T
+// identified by (pkg, name). It returns nil if the method exists but
+// is abstract, and panics if T has no such method.
+//
+func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function {
+ sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name)
+ if sel == nil {
+ panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name)))
+ }
+ return prog.MethodValue(sel)
+}
+
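+// For example, a client holding a built Program and a types.Type T that
+// has a concrete method String could obtain its SSA implementation with
+// (a sketch; T and prog are assumed to exist):
+//
+//	fn := prog.LookupMethod(T, nil, "String")
+//	_ = fn // *ssa.Function, possibly a synthesized wrapper
+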
+// methodSet contains the (concrete) methods of a non-interface type.
+type methodSet struct {
+ mapping map[string]*Function // populated lazily
+ complete bool // mapping contains all methods
+}
+
+// Precondition: !isInterface(T).
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
+func (prog *Program) createMethodSet(T types.Type) *methodSet {
+ mset, ok := prog.methodSets.At(T).(*methodSet)
+ if !ok {
+ mset = &methodSet{mapping: make(map[string]*Function)}
+ prog.methodSets.Set(T, mset)
+ }
+ return mset
+}
+
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
+func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function {
+ if sel.Kind() == types.MethodExpr {
+ panic(sel)
+ }
+ id := sel.Obj().Id()
+ fn := mset.mapping[id]
+ if fn == nil {
+ obj := sel.Obj().(*types.Func)
+
+ needsPromotion := len(sel.Index()) > 1
+ needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.Recv())
+ if needsPromotion || needsIndirection {
+ fn = makeWrapper(prog, sel)
+ } else {
+ fn = prog.declaredFunc(obj)
+ }
+ if fn.Signature.Recv() == nil {
+ panic(fn) // missing receiver
+ }
+ mset.mapping[id] = fn
+ }
+ return fn
+}
+
+// RuntimeTypes returns a new unordered slice containing all
+// concrete types in the program for which a complete (non-empty)
+// method set is required at run-time.
+//
+// Thread-safe.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
+//
+func (prog *Program) RuntimeTypes() []types.Type {
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+
+ var res []types.Type
+ prog.methodSets.Iterate(func(T types.Type, v interface{}) {
+ if v.(*methodSet).complete {
+ res = append(res, T)
+ }
+ })
+ return res
+}
+
+// declaredFunc returns the concrete function/method denoted by obj.
+// Panic ensues if there is none.
+//
+func (prog *Program) declaredFunc(obj *types.Func) *Function {
+ if v := prog.packageLevelValue(obj); v != nil {
+ return v.(*Function)
+ }
+ panic("no concrete method: " + obj.String())
+}
+
+// needMethodsOf ensures that runtime type information (including the
+// complete method set) is available for the specified type T and all
+// its subcomponents.
+//
+// needMethodsOf must be called for at least every type that is an
+// operand of some MakeInterface instruction, and for the type of
+// every exported package member.
+//
+// Precondition: T is not a method signature (*Signature with Recv()!=nil).
+//
+// Thread-safe. (Called via emitConv from multiple builder goroutines.)
+//
+// TODO(adonovan): make this faster. It accounts for 20% of SSA build time.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
+//
+func (prog *Program) needMethodsOf(T types.Type) {
+ prog.methodsMu.Lock()
+ prog.needMethods(T, false)
+ prog.methodsMu.Unlock()
+}
+
+// Precondition: T is not a method signature (*Signature with Recv()!=nil).
+// Recursive case: skip => don't create methods for T.
+//
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
+//
+func (prog *Program) needMethods(T types.Type, skip bool) {
+ // Each package maintains its own set of types it has visited.
+ if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok {
+ // needMethods(T) was previously called
+ if !prevSkip || skip {
+ return // already seen, with same or false 'skip' value
+ }
+ }
+ prog.runtimeTypes.Set(T, skip)
+
+ tmset := prog.MethodSets.MethodSet(T)
+
+ if !skip && !isInterface(T) && tmset.Len() > 0 {
+ // Create methods of T.
+ mset := prog.createMethodSet(T)
+ if !mset.complete {
+ mset.complete = true
+ n := tmset.Len()
+ for i := 0; i < n; i++ {
+ prog.addMethod(mset, tmset.At(i))
+ }
+ }
+ }
+
+ // Recursion over signatures of each method.
+ for i := 0; i < tmset.Len(); i++ {
+ sig := tmset.At(i).Type().(*types.Signature)
+ prog.needMethods(sig.Params(), false)
+ prog.needMethods(sig.Results(), false)
+ }
+
+ switch t := T.(type) {
+ case *types.Basic:
+ // nop
+
+ case *types.Interface:
+ // nop---handled by recursion over method set.
+
+ case *types.Pointer:
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Slice:
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Chan:
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Map:
+ prog.needMethods(t.Key(), false)
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Signature:
+ if t.Recv() != nil {
+ panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv()))
+ }
+ prog.needMethods(t.Params(), false)
+ prog.needMethods(t.Results(), false)
+
+ case *types.Named:
+ // A pointer-to-named type can be derived from a named
+ // type via reflection. It may have methods too.
+ prog.needMethods(types.NewPointer(T), false)
+
+ // Consider 'type T struct{S}' where S has methods.
+ // Reflection provides no way to get from T to struct{S},
+ // only to S, so the method set of struct{S} is unwanted,
+ // so set 'skip' flag during recursion.
+ prog.needMethods(t.Underlying(), true)
+
+ case *types.Array:
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Struct:
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ prog.needMethods(t.Field(i).Type(), false)
+ }
+
+ case *types.Tuple:
+ for i, n := 0, t.Len(); i < n; i++ {
+ prog.needMethods(t.At(i).Type(), false)
+ }
+
+ default:
+ panic(T)
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/mode.go b/vendor/golang.org/x/tools/go/ssa/mode.go
new file mode 100644
index 0000000..d2a2698
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/mode.go
@@ -0,0 +1,100 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines the BuilderMode type and its command-line flag.
+
+import (
+ "bytes"
+ "fmt"
+)
+
+// BuilderMode is a bitmask of options for diagnostics and checking.
+//
+// *BuilderMode satisfies the flag.Value interface. Example:
+//
+// var mode = ssa.BuilderMode(0)
+// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) }
+//
+type BuilderMode uint
+
+const (
+ PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout
+ PrintFunctions // Print function SSA code to stdout
+ LogSource // Log source locations as SSA builder progresses
+ SanityCheckFunctions // Perform sanity checking of function bodies
+ NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers
+ BuildSerially // Build packages serially, not in parallel.
+ GlobalDebug // Enable debug info for all packages
+ BareInits // Build init functions without guards or calls to dependent inits
+)
+
+const BuilderModeDoc = `Options controlling the SSA builder.
+The value is a sequence of zero or more of these letters:
+C perform sanity [C]hecking of the SSA form.
+D include [D]ebug info for every function.
+P print [P]ackage inventory.
+F print [F]unction SSA code.
+S log [S]ource locations as SSA builder progresses.
+L build distinct packages seria[L]ly instead of in parallel.
+N build [N]aive SSA form: don't replace local loads/stores with registers.
+I build bare [I]nit functions: no init guards or calls to dependent inits.
+`
+
+func (m BuilderMode) String() string {
+ var buf bytes.Buffer
+ if m&GlobalDebug != 0 {
+ buf.WriteByte('D')
+ }
+ if m&PrintPackages != 0 {
+ buf.WriteByte('P')
+ }
+ if m&PrintFunctions != 0 {
+ buf.WriteByte('F')
+ }
+ if m&LogSource != 0 {
+ buf.WriteByte('S')
+ }
+ if m&SanityCheckFunctions != 0 {
+ buf.WriteByte('C')
+ }
+ if m&NaiveForm != 0 {
+ buf.WriteByte('N')
+ }
+ if m&BuildSerially != 0 {
+ buf.WriteByte('L')
+ }
+ return buf.String()
+}
+
+// Set parses the flag characters in s and updates *m.
+func (m *BuilderMode) Set(s string) error {
+ var mode BuilderMode
+ for _, c := range s {
+ switch c {
+ case 'D':
+ mode |= GlobalDebug
+ case 'P':
+ mode |= PrintPackages
+ case 'F':
+ mode |= PrintFunctions
+ case 'S':
+ mode |= LogSource | BuildSerially
+ case 'C':
+ mode |= SanityCheckFunctions
+ case 'N':
+ mode |= NaiveForm
+ case 'L':
+ mode |= BuildSerially
+ default:
+ return fmt.Errorf("unknown BuilderMode option: %q", c)
+ }
+ }
+ *m = mode
+ return nil
+}
+
+// Get returns m.
+func (m BuilderMode) Get() interface{} { return m }
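
As the doc comment notes, *BuilderMode implements flag.Value. A minimal sketch (not part of the diff) of driving Set and String directly, assuming the package is imported as golang.org/x/tools/go/ssa:

package main

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

func main() {
	var mode ssa.BuilderMode
	// Set accepts the letters documented in BuilderModeDoc.
	if err := mode.Set("NC"); err != nil {
		panic(err)
	}
	fmt.Println(mode&ssa.NaiveForm != 0)            // true
	fmt.Println(mode&ssa.SanityCheckFunctions != 0) // true
	fmt.Println(mode)                               // "CN": String emits letters in its own fixed order
}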
diff --git a/vendor/golang.org/x/tools/go/ssa/print.go b/vendor/golang.org/x/tools/go/ssa/print.go
new file mode 100644
index 0000000..3333ba4
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/print.go
@@ -0,0 +1,431 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file implements the String() methods for all Value and
+// Instruction types.
+
+import (
+ "bytes"
+ "fmt"
+ "go/types"
+ "io"
+ "reflect"
+ "sort"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+// relName returns the name of v relative to i.
+// In most cases, this is identical to v.Name(), but references to
+// Functions (including methods) and Globals use RelString and
+// all types are displayed with relType, so that only cross-package
+// references are package-qualified.
+//
+func relName(v Value, i Instruction) string {
+ var from *types.Package
+ if i != nil {
+ from = i.Parent().pkg()
+ }
+ switch v := v.(type) {
+ case Member: // *Function or *Global
+ return v.RelString(from)
+ case *Const:
+ return v.RelString(from)
+ }
+ return v.Name()
+}
+
+func relType(t types.Type, from *types.Package) string {
+ return types.TypeString(t, types.RelativeTo(from))
+}
+
+func relString(m Member, from *types.Package) string {
+ // NB: not all globals have an Object (e.g. init$guard),
+ // so use Package().Object not Object.Package().
+ if pkg := m.Package().Pkg; pkg != nil && pkg != from {
+ return fmt.Sprintf("%s.%s", pkg.Path(), m.Name())
+ }
+ return m.Name()
+}
+
+// Value.String()
+//
+// This method is provided only for debugging.
+// It never appears in disassembly, which uses Value.Name().
+
+func (v *Parameter) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from))
+}
+
+func (v *FreeVar) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from))
+}
+
+func (v *Builtin) String() string {
+ return fmt.Sprintf("builtin %s", v.Name())
+}
+
+// Instruction.String()
+
+func (v *Alloc) String() string {
+ op := "local"
+ if v.Heap {
+ op = "new"
+ }
+ from := v.Parent().pkg()
+ return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment)
+}
+
+func (v *Phi) String() string {
+ var b bytes.Buffer
+ b.WriteString("phi [")
+ for i, edge := range v.Edges {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ // Be robust against malformed CFG.
+ if v.block == nil {
+ b.WriteString("??")
+ continue
+ }
+ block := -1
+ if i < len(v.block.Preds) {
+ block = v.block.Preds[i].Index
+ }
+ fmt.Fprintf(&b, "%d: ", block)
+ edgeVal := "<nil>" // be robust
+ if edge != nil {
+ edgeVal = relName(edge, v)
+ }
+ b.WriteString(edgeVal)
+ }
+ b.WriteString("]")
+ if v.Comment != "" {
+ b.WriteString(" #")
+ b.WriteString(v.Comment)
+ }
+ return b.String()
+}
+
+func printCall(v *CallCommon, prefix string, instr Instruction) string {
+ var b bytes.Buffer
+ b.WriteString(prefix)
+ if !v.IsInvoke() {
+ b.WriteString(relName(v.Value, instr))
+ } else {
+ fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name())
+ }
+ b.WriteString("(")
+ for i, arg := range v.Args {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ b.WriteString(relName(arg, instr))
+ }
+ if v.Signature().Variadic() {
+ b.WriteString("...")
+ }
+ b.WriteString(")")
+ return b.String()
+}
+
+func (c *CallCommon) String() string {
+ return printCall(c, "", nil)
+}
+
+func (v *Call) String() string {
+ return printCall(&v.Call, "", v)
+}
+
+func (v *BinOp) String() string {
+ return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v))
+}
+
+func (v *UnOp) String() string {
+ return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk))
+}
+
+func printConv(prefix string, v, x Value) string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("%s %s <- %s (%s)",
+ prefix,
+ relType(v.Type(), from),
+ relType(x.Type(), from),
+ relName(x, v.(Instruction)))
+}
+
+func (v *ChangeType) String() string { return printConv("changetype", v, v.X) }
+func (v *Convert) String() string { return printConv("convert", v, v.X) }
+func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) }
+func (v *MakeInterface) String() string { return printConv("make", v, v.X) }
+
+func (v *MakeClosure) String() string {
+ var b bytes.Buffer
+ fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v))
+ if v.Bindings != nil {
+ b.WriteString(" [")
+ for i, c := range v.Bindings {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ b.WriteString(relName(c, v))
+ }
+ b.WriteString("]")
+ }
+ return b.String()
+}
+
+func (v *MakeSlice) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("make %s %s %s",
+ relType(v.Type(), from),
+ relName(v.Len, v),
+ relName(v.Cap, v))
+}
+
+func (v *Slice) String() string {
+ var b bytes.Buffer
+ b.WriteString("slice ")
+ b.WriteString(relName(v.X, v))
+ b.WriteString("[")
+ if v.Low != nil {
+ b.WriteString(relName(v.Low, v))
+ }
+ b.WriteString(":")
+ if v.High != nil {
+ b.WriteString(relName(v.High, v))
+ }
+ if v.Max != nil {
+ b.WriteString(":")
+ b.WriteString(relName(v.Max, v))
+ }
+ b.WriteString("]")
+ return b.String()
+}
+
+func (v *MakeMap) String() string {
+ res := ""
+ if v.Reserve != nil {
+ res = relName(v.Reserve, v)
+ }
+ from := v.Parent().pkg()
+ return fmt.Sprintf("make %s %s", relType(v.Type(), from), res)
+}
+
+func (v *MakeChan) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v))
+}
+
+func (v *FieldAddr) String() string {
+ st := deref(v.X.Type()).Underlying().(*types.Struct)
+ // Be robust against a bad index.
+ name := "?"
+ if 0 <= v.Field && v.Field < st.NumFields() {
+ name = st.Field(v.Field).Name()
+ }
+ return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field)
+}
+
+func (v *Field) String() string {
+ st := v.X.Type().Underlying().(*types.Struct)
+ // Be robust against a bad index.
+ name := "?"
+ if 0 <= v.Field && v.Field < st.NumFields() {
+ name = st.Field(v.Field).Name()
+ }
+ return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field)
+}
+
+func (v *IndexAddr) String() string {
+ return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v))
+}
+
+func (v *Index) String() string {
+ return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v))
+}
+
+func (v *Lookup) String() string {
+ return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk))
+}
+
+func (v *Range) String() string {
+ return "range " + relName(v.X, v)
+}
+
+func (v *Next) String() string {
+ return "next " + relName(v.Iter, v)
+}
+
+func (v *TypeAssert) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from))
+}
+
+func (v *Extract) String() string {
+ return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index)
+}
+
+func (s *Jump) String() string {
+ // Be robust against malformed CFG.
+ block := -1
+ if s.block != nil && len(s.block.Succs) == 1 {
+ block = s.block.Succs[0].Index
+ }
+ return fmt.Sprintf("jump %d", block)
+}
+
+func (s *If) String() string {
+ // Be robust against malformed CFG.
+ tblock, fblock := -1, -1
+ if s.block != nil && len(s.block.Succs) == 2 {
+ tblock = s.block.Succs[0].Index
+ fblock = s.block.Succs[1].Index
+ }
+ return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock)
+}
+
+func (s *Go) String() string {
+ return printCall(&s.Call, "go ", s)
+}
+
+func (s *Panic) String() string {
+ return "panic " + relName(s.X, s)
+}
+
+func (s *Return) String() string {
+ var b bytes.Buffer
+ b.WriteString("return")
+ for i, r := range s.Results {
+ if i == 0 {
+ b.WriteString(" ")
+ } else {
+ b.WriteString(", ")
+ }
+ b.WriteString(relName(r, s))
+ }
+ return b.String()
+}
+
+func (*RunDefers) String() string {
+ return "rundefers"
+}
+
+func (s *Send) String() string {
+ return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s))
+}
+
+func (s *Defer) String() string {
+ return printCall(&s.Call, "defer ", s)
+}
+
+func (s *Select) String() string {
+ var b bytes.Buffer
+ for i, st := range s.States {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ if st.Dir == types.RecvOnly {
+ b.WriteString("<-")
+ b.WriteString(relName(st.Chan, s))
+ } else {
+ b.WriteString(relName(st.Chan, s))
+ b.WriteString("<-")
+ b.WriteString(relName(st.Send, s))
+ }
+ }
+ non := ""
+ if !s.Blocking {
+ non = "non"
+ }
+ return fmt.Sprintf("select %sblocking [%s]", non, b.String())
+}
+
+func (s *Store) String() string {
+ return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s))
+}
+
+func (s *MapUpdate) String() string {
+ return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s))
+}
+
+func (s *DebugRef) String() string {
+ p := s.Parent().Prog.Fset.Position(s.Pos())
+ var descr interface{}
+ if s.object != nil {
+ descr = s.object // e.g. "var x int"
+ } else {
+ descr = reflect.TypeOf(s.Expr) // e.g. "*ast.CallExpr"
+ }
+ var addr string
+ if s.IsAddr {
+ addr = "address of "
+ }
+ return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name())
+}
+
+func (p *Package) String() string {
+ return "package " + p.Pkg.Path()
+}
+
+var _ io.WriterTo = (*Package)(nil) // *Package implements io.WriterTo
+
+func (p *Package) WriteTo(w io.Writer) (int64, error) {
+ var buf bytes.Buffer
+ WritePackage(&buf, p)
+ n, err := w.Write(buf.Bytes())
+ return int64(n), err
+}
+
+// WritePackage writes to buf a human-readable summary of p.
+func WritePackage(buf *bytes.Buffer, p *Package) {
+ fmt.Fprintf(buf, "%s:\n", p)
+
+ var names []string
+ maxname := 0
+ for name := range p.Members {
+ if l := len(name); l > maxname {
+ maxname = l
+ }
+ names = append(names, name)
+ }
+
+ from := p.Pkg
+ sort.Strings(names)
+ for _, name := range names {
+ switch mem := p.Members[name].(type) {
+ case *NamedConst:
+ fmt.Fprintf(buf, " const %-*s %s = %s\n",
+ maxname, name, mem.Name(), mem.Value.RelString(from))
+
+ case *Function:
+ fmt.Fprintf(buf, " func %-*s %s\n",
+ maxname, name, relType(mem.Type(), from))
+
+ case *Type:
+ fmt.Fprintf(buf, " type %-*s %s\n",
+ maxname, name, relType(mem.Type().Underlying(), from))
+ for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) {
+ fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from)))
+ }
+
+ case *Global:
+ fmt.Fprintf(buf, " var %-*s %s\n",
+ maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from))
+ }
+ }
+
+ fmt.Fprintf(buf, "\n")
+}
+
+func commaOk(x bool) string {
+ if x {
+ return ",ok"
+ }
+ return ""
+}
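
A hedged sketch (not part of the diff) of producing the WritePackage summary for a loaded package, using the loader and ssautil packages that the tests later in this diff also rely on; the input file path is illustrative:

package main

import (
	"os"

	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	var conf loader.Config
	// Any loadable package works; this path is only an example.
	conf.CreateFromFilenames("main", "testdata/objlookup.go")
	iprog, err := conf.Load()
	if err != nil {
		panic(err)
	}
	prog := ssautil.CreateProgram(iprog, 0)
	pkg := prog.Package(iprog.Created[0].Pkg)
	pkg.Build()
	pkg.WriteTo(os.Stdout) // prints the member summary produced by WritePackage
}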
diff --git a/vendor/golang.org/x/tools/go/ssa/sanity.go b/vendor/golang.org/x/tools/go/ssa/sanity.go
new file mode 100644
index 0000000..0d13beb
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/sanity.go
@@ -0,0 +1,521 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// An optional pass for sanity-checking invariants of the SSA representation.
+// Currently it checks CFG invariants but little at the instruction level.
+
+import (
+ "fmt"
+ "go/types"
+ "io"
+ "os"
+ "strings"
+)
+
+type sanity struct {
+ reporter io.Writer
+ fn *Function
+ block *BasicBlock
+ instrs map[Instruction]struct{}
+ insane bool
+}
+
+// sanityCheck performs integrity checking of the SSA representation
+// of the function fn and returns true if it was valid. Diagnostics
+// are written to reporter if non-nil, os.Stderr otherwise. Some
+// diagnostics are only warnings and do not imply a negative result.
+//
+// Sanity-checking is intended to facilitate the debugging of code
+// transformation passes.
+//
+func sanityCheck(fn *Function, reporter io.Writer) bool {
+ if reporter == nil {
+ reporter = os.Stderr
+ }
+ return (&sanity{reporter: reporter}).checkFunction(fn)
+}
+
+// mustSanityCheck is like sanityCheck but panics instead of returning
+// a negative result.
+//
+func mustSanityCheck(fn *Function, reporter io.Writer) {
+ if !sanityCheck(fn, reporter) {
+ fn.WriteTo(os.Stderr)
+ panic("SanityCheck failed")
+ }
+}
+
+func (s *sanity) diagnostic(prefix, format string, args ...interface{}) {
+ fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn)
+ if s.block != nil {
+ fmt.Fprintf(s.reporter, ", block %s", s.block)
+ }
+ io.WriteString(s.reporter, ": ")
+ fmt.Fprintf(s.reporter, format, args...)
+ io.WriteString(s.reporter, "\n")
+}
+
+func (s *sanity) errorf(format string, args ...interface{}) {
+ s.insane = true
+ s.diagnostic("Error", format, args...)
+}
+
+func (s *sanity) warnf(format string, args ...interface{}) {
+ s.diagnostic("Warning", format, args...)
+}
+
+// findDuplicate returns an arbitrary basic block that appeared more
+// than once in blocks, or nil if all were unique.
+func findDuplicate(blocks []*BasicBlock) *BasicBlock {
+ if len(blocks) < 2 {
+ return nil
+ }
+ if blocks[0] == blocks[1] {
+ return blocks[0]
+ }
+ // Slow path:
+ m := make(map[*BasicBlock]bool)
+ for _, b := range blocks {
+ if m[b] {
+ return b
+ }
+ m[b] = true
+ }
+ return nil
+}
+
+func (s *sanity) checkInstr(idx int, instr Instruction) {
+ switch instr := instr.(type) {
+ case *If, *Jump, *Return, *Panic:
+ s.errorf("control flow instruction not at end of block")
+ case *Phi:
+ if idx == 0 {
+ // It suffices to apply this check to just the first phi node.
+ if dup := findDuplicate(s.block.Preds); dup != nil {
+ s.errorf("phi node in block with duplicate predecessor %s", dup)
+ }
+ } else {
+ prev := s.block.Instrs[idx-1]
+ if _, ok := prev.(*Phi); !ok {
+ s.errorf("Phi instruction follows a non-Phi: %T", prev)
+ }
+ }
+ if ne, np := len(instr.Edges), len(s.block.Preds); ne != np {
+ s.errorf("phi node has %d edges but %d predecessors", ne, np)
+
+ } else {
+ for i, e := range instr.Edges {
+ if e == nil {
+ s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i])
+ }
+ }
+ }
+
+ case *Alloc:
+ if !instr.Heap {
+ found := false
+ for _, l := range s.fn.Locals {
+ if l == instr {
+ found = true
+ break
+ }
+ }
+ if !found {
+ s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr)
+ }
+ }
+
+ case *BinOp:
+ case *Call:
+ case *ChangeInterface:
+ case *ChangeType:
+ case *Convert:
+ if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok {
+ if _, ok := instr.Type().Underlying().(*types.Basic); !ok {
+ s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type())
+ }
+ }
+
+ case *Defer:
+ case *Extract:
+ case *Field:
+ case *FieldAddr:
+ case *Go:
+ case *Index:
+ case *IndexAddr:
+ case *Lookup:
+ case *MakeChan:
+ case *MakeClosure:
+ numFree := len(instr.Fn.(*Function).FreeVars)
+ numBind := len(instr.Bindings)
+ if numFree != numBind {
+ s.errorf("MakeClosure has %d Bindings for function %s with %d free vars",
+ numBind, instr.Fn, numFree)
+
+ }
+ if recv := instr.Type().(*types.Signature).Recv(); recv != nil {
+ s.errorf("MakeClosure's type includes receiver %s", recv.Type())
+ }
+
+ case *MakeInterface:
+ case *MakeMap:
+ case *MakeSlice:
+ case *MapUpdate:
+ case *Next:
+ case *Range:
+ case *RunDefers:
+ case *Select:
+ case *Send:
+ case *Slice:
+ case *Store:
+ case *TypeAssert:
+ case *UnOp:
+ case *DebugRef:
+ // TODO(adonovan): implement checks.
+ default:
+ panic(fmt.Sprintf("Unknown instruction type: %T", instr))
+ }
+
+ if call, ok := instr.(CallInstruction); ok {
+ if call.Common().Signature() == nil {
+ s.errorf("nil signature: %s", call)
+ }
+ }
+
+ // Check that value-defining instructions have valid types
+ // and a valid referrer list.
+ if v, ok := instr.(Value); ok {
+ t := v.Type()
+ if t == nil {
+ s.errorf("no type: %s = %s", v.Name(), v)
+ } else if t == tRangeIter {
+ // not a proper type; ignore.
+ } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 {
+ s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t)
+ }
+ s.checkReferrerList(v)
+ }
+
+ // Untyped constants are legal as instruction Operands(),
+ // for example:
+ // _ = "foo"[0]
+ // or:
+ // if wordsize==64 {...}
+
+ // All other non-Instruction Values can be found via their
+ // enclosing Function or Package.
+}
+
+func (s *sanity) checkFinalInstr(instr Instruction) {
+ switch instr := instr.(type) {
+ case *If:
+ if nsuccs := len(s.block.Succs); nsuccs != 2 {
+ s.errorf("If-terminated block has %d successors; expected 2", nsuccs)
+ return
+ }
+ if s.block.Succs[0] == s.block.Succs[1] {
+ s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0])
+ return
+ }
+
+ case *Jump:
+ if nsuccs := len(s.block.Succs); nsuccs != 1 {
+ s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs)
+ return
+ }
+
+ case *Return:
+ if nsuccs := len(s.block.Succs); nsuccs != 0 {
+ s.errorf("Return-terminated block has %d successors; expected none", nsuccs)
+ return
+ }
+ if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na {
+ s.errorf("%d-ary return in %d-ary function", na, nf)
+ }
+
+ case *Panic:
+ if nsuccs := len(s.block.Succs); nsuccs != 0 {
+ s.errorf("Panic-terminated block has %d successors; expected none", nsuccs)
+ return
+ }
+
+ default:
+ s.errorf("non-control flow instruction at end of block")
+ }
+}
+
+func (s *sanity) checkBlock(b *BasicBlock, index int) {
+ s.block = b
+
+ if b.Index != index {
+ s.errorf("block has incorrect Index %d", b.Index)
+ }
+ if b.parent != s.fn {
+ s.errorf("block has incorrect parent %s", b.parent)
+ }
+
+ // Check all blocks are reachable.
+ // (The entry block is always implicitly reachable,
+ // as is the Recover block, if any.)
+ if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 {
+ s.warnf("unreachable block")
+ if b.Instrs == nil {
+ // Since this block is about to be pruned,
+ // tolerating transient problems in it
+ // simplifies other optimizations.
+ return
+ }
+ }
+
+ // Check predecessor and successor relations are dual,
+ // and that all blocks in CFG belong to same function.
+ for _, a := range b.Preds {
+ found := false
+ for _, bb := range a.Succs {
+ if bb == b {
+ found = true
+ break
+ }
+ }
+ if !found {
+ s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs)
+ }
+ if a.parent != s.fn {
+ s.errorf("predecessor %s belongs to different function %s", a, a.parent)
+ }
+ }
+ for _, c := range b.Succs {
+ found := false
+ for _, bb := range c.Preds {
+ if bb == b {
+ found = true
+ break
+ }
+ }
+ if !found {
+ s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds)
+ }
+ if c.parent != s.fn {
+ s.errorf("successor %s belongs to different function %s", c, c.parent)
+ }
+ }
+
+ // Check each instruction is sane.
+ n := len(b.Instrs)
+ if n == 0 {
+ s.errorf("basic block contains no instructions")
+ }
+ var rands [10]*Value // reuse storage
+ for j, instr := range b.Instrs {
+ if instr == nil {
+ s.errorf("nil instruction at index %d", j)
+ continue
+ }
+ if b2 := instr.Block(); b2 == nil {
+ s.errorf("nil Block() for instruction at index %d", j)
+ continue
+ } else if b2 != b {
+ s.errorf("wrong Block() (%s) for instruction at index %d", b2, j)
+ continue
+ }
+ if j < n-1 {
+ s.checkInstr(j, instr)
+ } else {
+ s.checkFinalInstr(instr)
+ }
+
+ // Check Instruction.Operands.
+ operands:
+ for i, op := range instr.Operands(rands[:0]) {
+ if op == nil {
+ s.errorf("nil operand pointer %d of %s", i, instr)
+ continue
+ }
+ val := *op
+ if val == nil {
+ continue // a nil operand is ok
+ }
+
+ // Check that "untyped" types only appear on constant operands.
+ if _, ok := (*op).(*Const); !ok {
+ if basic, ok := (*op).Type().(*types.Basic); ok {
+ if basic.Info()&types.IsUntyped != 0 {
+ s.errorf("operand #%d of %s is untyped: %s", i, instr, basic)
+ }
+ }
+ }
+
+ // Check that Operands that are also Instructions belong to same function.
+ // TODO(adonovan): also check their block dominates block b.
+ if val, ok := val.(Instruction); ok {
+ if val.Block() == nil {
+ s.errorf("operand %d of %s is an instruction (%s) that belongs to no block", i, instr, val)
+ } else if val.Parent() != s.fn {
+ s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent())
+ }
+ }
+
+ // Check that each function-local operand of
+ // instr refers back to instr. (NB: quadratic)
+ switch val := val.(type) {
+ case *Const, *Global, *Builtin:
+ continue // not local
+ case *Function:
+ if val.parent == nil {
+ continue // only anon functions are local
+ }
+ }
+
+ // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined.
+
+ if refs := val.Referrers(); refs != nil {
+ for _, ref := range *refs {
+ if ref == instr {
+ continue operands
+ }
+ }
+ s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val)
+ } else {
+ s.errorf("operand %d of %s (%s) has no referrers", i, instr, val)
+ }
+ }
+ }
+}
+
+func (s *sanity) checkReferrerList(v Value) {
+ refs := v.Referrers()
+ if refs == nil {
+ s.errorf("%s has missing referrer list", v.Name())
+ return
+ }
+ for i, ref := range *refs {
+ if _, ok := s.instrs[ref]; !ok {
+ s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref)
+ }
+ }
+}
+
+func (s *sanity) checkFunction(fn *Function) bool {
+ // TODO(adonovan): check Function invariants:
+ // - check params match signature
+ // - check transient fields are nil
+ // - warn if any fn.Locals do not appear among block instructions.
+ s.fn = fn
+ if fn.Prog == nil {
+ s.errorf("nil Prog")
+ }
+
+ fn.String() // must not crash
+ fn.RelString(fn.pkg()) // must not crash
+
+ // All functions have a package, except delegates (which are
+ // shared across packages, or duplicated as weak symbols in a
+ // separate-compilation model), and error.Error.
+ if fn.Pkg == nil {
+ if strings.HasPrefix(fn.Synthetic, "wrapper ") ||
+ strings.HasPrefix(fn.Synthetic, "bound ") ||
+ strings.HasPrefix(fn.Synthetic, "thunk ") ||
+ strings.HasSuffix(fn.name, "Error") {
+ // ok
+ } else {
+ s.errorf("nil Pkg")
+ }
+ }
+ if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn {
+ s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn)
+ }
+ for i, l := range fn.Locals {
+ if l.Parent() != fn {
+ s.errorf("Local %s at index %d has wrong parent", l.Name(), i)
+ }
+ if l.Heap {
+ s.errorf("Local %s at index %d has Heap flag set", l.Name(), i)
+ }
+ }
+ // Build the set of valid referrers.
+ s.instrs = make(map[Instruction]struct{})
+ for _, b := range fn.Blocks {
+ for _, instr := range b.Instrs {
+ s.instrs[instr] = struct{}{}
+ }
+ }
+ for i, p := range fn.Params {
+ if p.Parent() != fn {
+ s.errorf("Param %s at index %d has wrong parent", p.Name(), i)
+ }
+ s.checkReferrerList(p)
+ }
+ for i, fv := range fn.FreeVars {
+ if fv.Parent() != fn {
+ s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i)
+ }
+ s.checkReferrerList(fv)
+ }
+
+ if fn.Blocks != nil && len(fn.Blocks) == 0 {
+ // Function _had_ blocks (so it's not external) but
+ // they were "optimized" away, even the entry block.
+ s.errorf("Blocks slice is non-nil but empty")
+ }
+ for i, b := range fn.Blocks {
+ if b == nil {
+ s.warnf("nil *BasicBlock at f.Blocks[%d]", i)
+ continue
+ }
+ s.checkBlock(b, i)
+ }
+ if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover {
+ s.errorf("Recover block is not in Blocks slice")
+ }
+
+ s.block = nil
+ for i, anon := range fn.AnonFuncs {
+ if anon.Parent() != fn {
+ s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent())
+ }
+ }
+ s.fn = nil
+ return !s.insane
+}
+
+// sanityCheckPackage checks invariants of packages upon creation.
+// It does not require that the package is built.
+// Unlike sanityCheck (for functions), it just panics at the first error.
+func sanityCheckPackage(pkg *Package) {
+ if pkg.Pkg == nil {
+ panic(fmt.Sprintf("Package %s has no Object", pkg))
+ }
+ pkg.String() // must not crash
+
+ for name, mem := range pkg.Members {
+ if name != mem.Name() {
+ panic(fmt.Sprintf("%s: %T.Name() = %s, want %s",
+ pkg.Pkg.Path(), mem, mem.Name(), name))
+ }
+ obj := mem.Object()
+ if obj == nil {
+ // This check is sound because fields
+ // {Global,Function}.object have type
+ // types.Object. (If they were declared as
+ // *types.{Var,Func}, we'd have a non-empty
+ // interface containing a nil pointer.)
+
+ continue // not all members have typechecker objects
+ }
+ if obj.Name() != name {
+ if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") {
+ // Ok. The name of a declared init function varies between
+ // its types.Func ("init") and its ssa.Function ("init#%d").
+ } else {
+ panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s",
+ pkg.Pkg.Path(), mem, obj.Name(), name))
+ }
+ }
+ if obj.Pos() != mem.Pos() {
+ panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos()))
+ }
+ }
+}
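
sanityCheck and mustSanityCheck are unexported; clients normally reach this pass through the SanityCheckFunctions builder mode, under which each function is checked as it is built. A hedged sketch, reusing the loader-based setup seen in the tests below (the input file name is illustrative):

package main

import (
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	var conf loader.Config
	conf.CreateFromFilenames("main", "testdata/valueforexpr.go") // illustrative input
	iprog, err := conf.Load()
	if err != nil {
		panic(err)
	}
	// With SanityCheckFunctions set, the builder sanity-checks every
	// function body and panics if an invariant is violated.
	prog := ssautil.CreateProgram(iprog, ssa.SanityCheckFunctions)
	prog.Build()
}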
diff --git a/vendor/golang.org/x/tools/go/ssa/source.go b/vendor/golang.org/x/tools/go/ssa/source.go
new file mode 100644
index 0000000..6d2223e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/source.go
@@ -0,0 +1,293 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines utilities for working with source positions
+// or source-level named entities ("objects").
+
+// TODO(adonovan): test that {Value,Instruction}.Pos() positions match
+// the originating syntax, as specified.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+)
+
+// EnclosingFunction returns the function that contains the syntax
+// node denoted by path.
+//
+// Syntax associated with package-level variable specifications is
+// enclosed by the package's init() function.
+//
+// Returns nil if not found; reasons might include:
+// - the node is not enclosed by any function.
+// - the node is within an anonymous function (FuncLit) and
+// its SSA function has not been created yet
+// (pkg.Build() has not yet been called).
+//
+func EnclosingFunction(pkg *Package, path []ast.Node) *Function {
+ // Start with package-level function...
+ fn := findEnclosingPackageLevelFunction(pkg, path)
+ if fn == nil {
+ return nil // not in any function
+ }
+
+ // ...then walk down the nested anonymous functions.
+ n := len(path)
+outer:
+ for i := range path {
+ if lit, ok := path[n-1-i].(*ast.FuncLit); ok {
+ for _, anon := range fn.AnonFuncs {
+ if anon.Pos() == lit.Type.Func {
+ fn = anon
+ continue outer
+ }
+ }
+ // SSA function not found:
+ // - package not yet built, or maybe
+ // - builder skipped FuncLit in dead block
+ // (in principle; but currently the Builder
+ // generates even dead FuncLits).
+ return nil
+ }
+ }
+ return fn
+}
+
+// HasEnclosingFunction returns true if the AST node denoted by path
+// is contained within the declaration of some function or
+// package-level variable.
+//
+// Unlike EnclosingFunction, the behaviour of this function does not
+// depend on whether SSA code for pkg has been built, so it can be
+// used to quickly reject inputs that will cause
+// EnclosingFunction to fail, prior to SSA building.
+//
+func HasEnclosingFunction(pkg *Package, path []ast.Node) bool {
+ return findEnclosingPackageLevelFunction(pkg, path) != nil
+}
+
+// findEnclosingPackageLevelFunction returns the Function
+// corresponding to the package-level function enclosing path.
+//
+func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function {
+ if n := len(path); n >= 2 { // [... {Gen,Func}Decl File]
+ switch decl := path[n-2].(type) {
+ case *ast.GenDecl:
+ if decl.Tok == token.VAR && n >= 3 {
+ // Package-level 'var' initializer.
+ return pkg.init
+ }
+
+ case *ast.FuncDecl:
+ if decl.Recv == nil && decl.Name.Name == "init" {
+ // Explicit init() function.
+ for _, b := range pkg.init.Blocks {
+ for _, instr := range b.Instrs {
+ if instr, ok := instr.(*Call); ok {
+ if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos {
+ return callee
+ }
+ }
+ }
+ }
+ // Hack: return non-nil when SSA is not yet
+ // built so that HasEnclosingFunction works.
+ return pkg.init
+ }
+ // Declared function/method.
+ return findNamedFunc(pkg, decl.Name.NamePos)
+ }
+ }
+ return nil // not in any function
+}
+
+// findNamedFunc returns the named function whose FuncDecl.Ident is at
+// position pos.
+//
+func findNamedFunc(pkg *Package, pos token.Pos) *Function {
+ // Look at all package members and method sets of named types.
+ // Not very efficient.
+ for _, mem := range pkg.Members {
+ switch mem := mem.(type) {
+ case *Function:
+ if mem.Pos() == pos {
+ return mem
+ }
+ case *Type:
+ mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type()))
+ for i, n := 0, mset.Len(); i < n; i++ {
+ // Don't call Program.Method: avoid creating wrappers.
+ obj := mset.At(i).Obj().(*types.Func)
+ if obj.Pos() == pos {
+ return pkg.values[obj].(*Function)
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// ValueForExpr returns the SSA Value that corresponds to non-constant
+// expression e.
+//
+// It returns nil if no value was found, e.g.
+// - the expression is not lexically contained within f;
+// - f was not built with debug information;
+// - e is a constant expression (for efficiency, no debug
+// information is stored for constants; use
+// go/types.Info.Types[e].Value instead);
+// - e is a reference to nil or a built-in function; or
+// - the value was optimised away.
+//
+// If e is an addressable expression used in an lvalue context,
+// value is the address denoted by e, and isAddr is true.
+//
+// The types of e (or &e, if isAddr) and the result are equal
+// (modulo "untyped" bools resulting from comparisons).
+//
+// (Tip: to find the ssa.Value given a source position, use
+// importer.PathEnclosingInterval to locate the ast.Node, then
+// EnclosingFunction to locate the Function, then ValueForExpr to find
+// the ssa.Value.)
+//
+func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) {
+ if f.debugInfo() { // (opt)
+ e = unparen(e)
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ if ref, ok := instr.(*DebugRef); ok {
+ if ref.Expr == e {
+ return ref.X, ref.IsAddr
+ }
+ }
+ }
+ }
+ }
+ return
+}
+
+// --- Lookup functions for source-level named entities (types.Objects) ---
+
+// Package returns the SSA Package corresponding to the specified
+// type-checker package object.
+// It returns nil if no such SSA package has been created.
+//
+func (prog *Program) Package(obj *types.Package) *Package {
+ return prog.packages[obj]
+}
+
+// packageLevelValue returns the package-level value corresponding to
+// the specified named object, which may be a package-level const
+// (*Const), var (*Global) or func (*Function) of some package in
+// prog. It returns nil if the object is not found.
+//
+func (prog *Program) packageLevelValue(obj types.Object) Value {
+ if pkg, ok := prog.packages[obj.Pkg()]; ok {
+ return pkg.values[obj]
+ }
+ return nil
+}
+
+// FuncValue returns the concrete Function denoted by the source-level
+// named function obj, or nil if obj denotes an interface method.
+//
+// TODO(adonovan): check the invariant that obj.Type() matches the
+// result's Signature, both in the params/results and in the receiver.
+//
+func (prog *Program) FuncValue(obj *types.Func) *Function {
+ fn, _ := prog.packageLevelValue(obj).(*Function)
+ return fn
+}
+
+// ConstValue returns the SSA Value denoted by the source-level named
+// constant obj.
+//
+func (prog *Program) ConstValue(obj *types.Const) *Const {
+ // TODO(adonovan): opt: share (don't reallocate)
+ // Consts for const objects and constant ast.Exprs.
+
+ // Universal constant? {true,false,nil}
+ if obj.Parent() == types.Universe {
+ return NewConst(obj.Val(), obj.Type())
+ }
+ // Package-level named constant?
+ if v := prog.packageLevelValue(obj); v != nil {
+ return v.(*Const)
+ }
+ return NewConst(obj.Val(), obj.Type())
+}
+
+// VarValue returns the SSA Value that corresponds to a specific
+// identifier denoting the source-level named variable obj.
+//
+// VarValue returns nil if a local variable was not found, perhaps
+// because its package was not built, the debug information was not
+// requested during SSA construction, or the value was optimized away.
+//
+// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval),
+// and that ident must resolve to obj.
+//
+// pkg is the package enclosing the reference. (A reference to a var
+// always occurs within a function, so we need to know where to find it.)
+//
+// If the identifier is a field selector and its base expression is
+// non-addressable, then VarValue returns the value of that field.
+// For example:
+// func f() struct {x int}
+// f().x // VarValue(x) returns a *Field instruction of type int
+//
+// All other identifiers denote addressable locations (variables).
+// For them, VarValue may return either the variable's address or its
+// value, even when the expression is evaluated only for its value; the
+// situation is reported by isAddr, the second component of the result.
+//
+// If !isAddr, the returned value is the one associated with the
+// specific identifier. For example,
+// var x int // VarValue(x) returns Const 0 here
+// x = 1 // VarValue(x) returns Const 1 here
+//
+// It is not specified whether the value or the address is returned in
+// any particular case, as it may depend upon optimizations performed
+// during SSA code generation, such as registerization, constant
+// folding, avoidance of materialization of subexpressions, etc.
+//
+func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) {
+ // All references to a var are local to some function, possibly init.
+ fn := EnclosingFunction(pkg, ref)
+ if fn == nil {
+ return // e.g. def of struct field; SSA not built?
+ }
+
+ id := ref[0].(*ast.Ident)
+
+ // Defining ident of a parameter?
+ if id.Pos() == obj.Pos() {
+ for _, param := range fn.Params {
+ if param.Object() == obj {
+ return param, false
+ }
+ }
+ }
+
+ // Other ident?
+ for _, b := range fn.Blocks {
+ for _, instr := range b.Instrs {
+ if dr, ok := instr.(*DebugRef); ok {
+ if dr.Pos() == id.Pos() {
+ return dr.X, dr.IsAddr
+ }
+ }
+ }
+ }
+
+ // Defining ident of package-level var?
+ if v := prog.packageLevelValue(obj); v != nil {
+ return v.(*Global), true
+ }
+
+ return // e.g. debug info not requested, or var optimized away
+}
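
A hedged sketch (not part of the vendored code) of the lookup chain the ValueForExpr tip describes: PathEnclosingInterval to find the node, EnclosingFunction to find the function, then ValueForExpr. valueAt is a hypothetical helper name, and pkg is assumed to have been built with SetDebugMode(true).

package main

import (
	"go/ast"
	"go/token"

	"golang.org/x/tools/go/ast/astutil"
	"golang.org/x/tools/go/ssa"
)

// valueAt returns the ssa.Value for the innermost expression covering
// [start, end) in file, or reports failure.
func valueAt(pkg *ssa.Package, file *ast.File, start, end token.Pos) (ssa.Value, bool) {
	path, exact := astutil.PathEnclosingInterval(file, start, end)
	if !exact {
		return nil, false
	}
	fn := ssa.EnclosingFunction(pkg, path)
	if fn == nil {
		return nil, false // not inside any built function
	}
	expr, ok := path[0].(ast.Expr)
	if !ok {
		return nil, false
	}
	v, _ := fn.ValueForExpr(expr)
	return v, v != nil
}

func main() {} // placeholder so the sketch compiles as a program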
diff --git a/vendor/golang.org/x/tools/go/ssa/source_test.go b/vendor/golang.org/x/tools/go/ssa/source_test.go
new file mode 100644
index 0000000..43051f8
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/source_test.go
@@ -0,0 +1,397 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa_test
+
+// This file defines tests of source-level debugging utilities.
+
+import (
+ "fmt"
+ "go/ast"
+ exact "go/constant"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "os"
+ "regexp"
+ "runtime"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+func TestObjValueLookup(t *testing.T) {
+ if runtime.GOOS == "android" {
+ t.Skipf("no testdata directory on %s", runtime.GOOS)
+ }
+
+ conf := loader.Config{ParserMode: parser.ParseComments}
+ f, err := conf.ParseFile("testdata/objlookup.go", nil)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ conf.CreateFromFiles("main", f)
+
+ // Maps each var Ident (represented "name:linenum") to the
+ // kind of ssa.Value we expect (represented "Constant", "&Alloc").
+ expectations := make(map[string]string)
+
+ // Find all annotations of form x::BinOp, &y::Alloc, etc.
+ re := regexp.MustCompile(`(\b|&)?(\w*)::(\w*)\b`)
+ for _, c := range f.Comments {
+ text := c.Text()
+ pos := conf.Fset.Position(c.Pos())
+ for _, m := range re.FindAllStringSubmatch(text, -1) {
+ key := fmt.Sprintf("%s:%d", m[2], pos.Line)
+ value := m[1] + m[3]
+ expectations[key] = value
+ }
+ }
+
+ iprog, err := conf.Load()
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ prog := ssautil.CreateProgram(iprog, 0 /*|ssa.PrintFunctions*/)
+ mainInfo := iprog.Created[0]
+ mainPkg := prog.Package(mainInfo.Pkg)
+ mainPkg.SetDebugMode(true)
+ mainPkg.Build()
+
+ var varIds []*ast.Ident
+ var varObjs []*types.Var
+ for id, obj := range mainInfo.Defs {
+ // Check invariants for func and const objects.
+ switch obj := obj.(type) {
+ case *types.Func:
+ checkFuncValue(t, prog, obj)
+
+ case *types.Const:
+ checkConstValue(t, prog, obj)
+
+ case *types.Var:
+ if id.Name == "_" {
+ continue
+ }
+ varIds = append(varIds, id)
+ varObjs = append(varObjs, obj)
+ }
+ }
+ for id, obj := range mainInfo.Uses {
+ if obj, ok := obj.(*types.Var); ok {
+ varIds = append(varIds, id)
+ varObjs = append(varObjs, obj)
+ }
+ }
+
+ // Check invariants for var objects.
+ // The result varies based on the specific Ident.
+ for i, id := range varIds {
+ obj := varObjs[i]
+ ref, _ := astutil.PathEnclosingInterval(f, id.Pos(), id.Pos())
+ pos := prog.Fset.Position(id.Pos())
+ exp := expectations[fmt.Sprintf("%s:%d", id.Name, pos.Line)]
+ if exp == "" {
+ t.Errorf("%s: no expectation for var ident %s ", pos, id.Name)
+ continue
+ }
+ wantAddr := false
+ if exp[0] == '&' {
+ wantAddr = true
+ exp = exp[1:]
+ }
+ checkVarValue(t, prog, mainPkg, ref, obj, exp, wantAddr)
+ }
+}
+
+func checkFuncValue(t *testing.T, prog *ssa.Program, obj *types.Func) {
+ fn := prog.FuncValue(obj)
+ // fmt.Printf("FuncValue(%s) = %s\n", obj, fn) // debugging
+ if fn == nil {
+ if obj.Name() != "interfaceMethod" {
+ t.Errorf("FuncValue(%s) == nil", obj)
+ }
+ return
+ }
+ if fnobj := fn.Object(); fnobj != obj {
+ t.Errorf("FuncValue(%s).Object() == %s; value was %s",
+ obj, fnobj, fn.Name())
+ return
+ }
+ if !types.Identical(fn.Type(), obj.Type()) {
+ t.Errorf("FuncValue(%s).Type() == %s", obj, fn.Type())
+ return
+ }
+}
+
+func checkConstValue(t *testing.T, prog *ssa.Program, obj *types.Const) {
+ c := prog.ConstValue(obj)
+ // fmt.Printf("ConstValue(%s) = %s\n", obj, c) // debugging
+ if c == nil {
+ t.Errorf("ConstValue(%s) == nil", obj)
+ return
+ }
+ if !types.Identical(c.Type(), obj.Type()) {
+ t.Errorf("ConstValue(%s).Type() == %s", obj, c.Type())
+ return
+ }
+ if obj.Name() != "nil" {
+ if !exact.Compare(c.Value, token.EQL, obj.Val()) {
+ t.Errorf("ConstValue(%s).Value (%s) != %s",
+ obj, c.Value, obj.Val())
+ return
+ }
+ }
+}
+
+func checkVarValue(t *testing.T, prog *ssa.Program, pkg *ssa.Package, ref []ast.Node, obj *types.Var, expKind string, wantAddr bool) {
+ // The prefix of all assertions messages.
+ prefix := fmt.Sprintf("VarValue(%s @ L%d)",
+ obj, prog.Fset.Position(ref[0].Pos()).Line)
+
+ v, gotAddr := prog.VarValue(obj, pkg, ref)
+
+ // Kind is the concrete type of the ssa Value.
+ gotKind := "nil"
+ if v != nil {
+ gotKind = fmt.Sprintf("%T", v)[len("*ssa."):]
+ }
+
+ // fmt.Printf("%s = %v (kind %q; expect %q) wantAddr=%t gotAddr=%t\n", prefix, v, gotKind, expKind, wantAddr, gotAddr) // debugging
+
+ // Check the kinds match.
+ // "nil" indicates expected failure (e.g. optimized away).
+ if expKind != gotKind {
+ t.Errorf("%s concrete type == %s, want %s", prefix, gotKind, expKind)
+ }
+
+ // Check the types match.
+ // If wantAddr, the expected type is the object's address.
+ if v != nil {
+ expType := obj.Type()
+ if wantAddr {
+ expType = types.NewPointer(expType)
+ if !gotAddr {
+ t.Errorf("%s: got value, want address", prefix)
+ }
+ } else if gotAddr {
+ t.Errorf("%s: got address, want value", prefix)
+ }
+ if !types.Identical(v.Type(), expType) {
+ t.Errorf("%s.Type() == %s, want %s", prefix, v.Type(), expType)
+ }
+ }
+}
+
+// Ensure that, in debug mode, we can determine the ssa.Value
+// corresponding to every ast.Expr.
+func TestValueForExpr(t *testing.T) {
+ testValueForExpr(t, "testdata/valueforexpr.go")
+}
+
+func testValueForExpr(t *testing.T, testfile string) {
+ if runtime.GOOS == "android" {
+ t.Skipf("no testdata dir on %s", runtime.GOOS)
+ }
+
+ conf := loader.Config{ParserMode: parser.ParseComments}
+ f, err := conf.ParseFile(testfile, nil)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ conf.CreateFromFiles("main", f)
+
+ iprog, err := conf.Load()
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ mainInfo := iprog.Created[0]
+
+ prog := ssautil.CreateProgram(iprog, 0)
+ mainPkg := prog.Package(mainInfo.Pkg)
+ mainPkg.SetDebugMode(true)
+ mainPkg.Build()
+
+ if false {
+ // debugging
+ for _, mem := range mainPkg.Members {
+ if fn, ok := mem.(*ssa.Function); ok {
+ fn.WriteTo(os.Stderr)
+ }
+ }
+ }
+
+ // Find the actual AST node for each canonical position.
+ parenExprByPos := make(map[token.Pos]*ast.ParenExpr)
+ ast.Inspect(f, func(n ast.Node) bool {
+ if n != nil {
+ if e, ok := n.(*ast.ParenExpr); ok {
+ parenExprByPos[e.Pos()] = e
+ }
+ }
+ return true
+ })
+
+ // Find all annotations of form /*@kind*/.
+ for _, c := range f.Comments {
+ text := strings.TrimSpace(c.Text())
+ if text == "" || text[0] != '@' {
+ continue
+ }
+ text = text[1:]
+ pos := c.End() + 1
+ position := prog.Fset.Position(pos)
+ var e ast.Expr
+ if target := parenExprByPos[pos]; target == nil {
+ t.Errorf("%s: annotation doesn't precede ParenExpr: %q", position, text)
+ continue
+ } else {
+ e = target.X
+ }
+
+ path, _ := astutil.PathEnclosingInterval(f, pos, pos)
+ if path == nil {
+ t.Errorf("%s: can't find AST path from root to comment: %s", position, text)
+ continue
+ }
+
+ fn := ssa.EnclosingFunction(mainPkg, path)
+ if fn == nil {
+ t.Errorf("%s: can't find enclosing function", position)
+ continue
+ }
+
+ v, gotAddr := fn.ValueForExpr(e) // (may be nil)
+ got := strings.TrimPrefix(fmt.Sprintf("%T", v), "*ssa.")
+ if want := text; got != want {
+ t.Errorf("%s: got value %q, want %q", position, got, want)
+ }
+ if v != nil {
+ T := v.Type()
+ if gotAddr {
+ T = T.Underlying().(*types.Pointer).Elem() // deref
+ }
+ if !types.Identical(T, mainInfo.TypeOf(e)) {
+ t.Errorf("%s: got type %s, want %s", position, T, mainInfo.TypeOf(e))
+ }
+ }
+ }
+}
+
+// findInterval parses input and returns the [start, end) positions of
+// the first occurrence of substr in input. f==nil indicates failure;
+// an error has already been reported in that case.
+//
+func findInterval(t *testing.T, fset *token.FileSet, input, substr string) (f *ast.File, start, end token.Pos) {
+ f, err := parser.ParseFile(fset, "<input>", input, 0)
+ if err != nil {
+ t.Errorf("parse error: %s", err)
+ return
+ }
+
+ i := strings.Index(input, substr)
+ if i < 0 {
+ t.Errorf("%q is not a substring of input", substr)
+ f = nil
+ return
+ }
+
+ filePos := fset.File(f.Package)
+ return f, filePos.Pos(i), filePos.Pos(i + len(substr))
+}
+
+func TestEnclosingFunction(t *testing.T) {
+ tests := []struct {
+ input string // the input file
+ substr string // first occurrence of this string denotes interval
+ fn string // name of expected containing function
+ }{
+ // We use distinctive numbers as syntactic landmarks.
+
+ // Ordinary function:
+ {`package main
+ func f() { println(1003) }`,
+ "100", "main.f"},
+ // Methods:
+ {`package main
+ type T int
+ func (t T) f() { println(200) }`,
+ "200", "(main.T).f"},
+ // Function literal:
+ {`package main
+ func f() { println(func() { print(300) }) }`,
+ "300", "main.f$1"},
+ // Doubly nested
+ {`package main
+ func f() { println(func() { print(func() { print(350) })})}`,
+ "350", "main.f$1$1"},
+ // Implicit init for package-level var initializer.
+ {"package main; var a = 400", "400", "main.init"},
+ // No code for constants:
+ {"package main; const a = 500", "500", "(none)"},
+ // Explicit init()
+ {"package main; func init() { println(600) }", "600", "main.init#1"},
+ // Multiple explicit init functions:
+ {`package main
+ func init() { println("foo") }
+ func init() { println(800) }`,
+ "800", "main.init#2"},
+ // init() containing FuncLit.
+ {`package main
+ func init() { println(func(){print(900)}) }`,
+ "900", "main.init#1$1"},
+ }
+ for _, test := range tests {
+ conf := loader.Config{Fset: token.NewFileSet()}
+ f, start, end := findInterval(t, conf.Fset, test.input, test.substr)
+ if f == nil {
+ continue
+ }
+ path, exact := astutil.PathEnclosingInterval(f, start, end)
+ if !exact {
+ t.Errorf("PathEnclosingInterval(%q) not exact", test.substr)
+ continue
+ }
+
+ conf.CreateFromFiles("main", f)
+
+ iprog, err := conf.Load()
+ if err != nil {
+ t.Error(err)
+ continue
+ }
+ prog := ssautil.CreateProgram(iprog, 0)
+ pkg := prog.Package(iprog.Created[0].Pkg)
+ pkg.Build()
+
+ name := "(none)"
+ fn := ssa.EnclosingFunction(pkg, path)
+ if fn != nil {
+ name = fn.String()
+ }
+
+ if name != test.fn {
+ t.Errorf("EnclosingFunction(%q in %q) got %s, want %s",
+ test.substr, test.input, name, test.fn)
+ continue
+ }
+
+ // While we're here: test HasEnclosingFunction.
+ if has := ssa.HasEnclosingFunction(pkg, path); has != (fn != nil) {
+ t.Errorf("HasEnclosingFunction(%q in %q) got %v, want %v",
+ test.substr, test.input, has, fn != nil)
+ continue
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssa.go b/vendor/golang.org/x/tools/go/ssa/ssa.go
new file mode 100644
index 0000000..e8350f1
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssa.go
@@ -0,0 +1,1696 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This package defines a high-level intermediate representation for
+// Go programs using static single-assignment (SSA) form.
+
+import (
+ "fmt"
+ "go/ast"
+ exact "go/constant"
+ "go/token"
+ "go/types"
+ "sync"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+// A Program is a partial or complete Go program converted to SSA form.
+type Program struct {
+ Fset *token.FileSet // position information for the files of this Program
+ imported map[string]*Package // all importable Packages, keyed by import path
+ packages map[*types.Package]*Package // all loaded Packages, keyed by object
+ mode BuilderMode // set of mode bits for SSA construction
+ MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets
+
+ methodsMu sync.Mutex // guards the following maps:
+ methodSets typeutil.Map // maps type to its concrete methodSet
+ runtimeTypes typeutil.Map // types for which rtypes are needed
+ canon typeutil.Map // type canonicalization map
+ bounds map[*types.Func]*Function // bounds for curried x.Method closures
+ thunks map[selectionKey]*Function // thunks for T.Method expressions
+}
+
+// A Package is a single analyzed Go package containing Members for
+// all package-level functions, variables, constants and types it
+// declares. These may be accessed directly via Members, or via the
+// type-specific accessor methods Func, Type, Var and Const.
+//
+// Members also contains entries for "init" (the synthetic package
+// initializer) and "init#%d", the nth declared init function,
+// and unspecified other things too.
+//
+type Package struct {
+ Prog *Program // the owning program
+ Pkg *types.Package // the corresponding go/types.Package
+ Members map[string]Member // all package members keyed by name (incl. init and init#%d)
+ values map[types.Object]Value // package members (incl. types and methods), keyed by object
+ init *Function // Func("init"); the package's init function
+ debug bool // include full debug info in this package
+
+ // The following fields are set transiently, then cleared
+ // after building.
+ buildOnce sync.Once // ensures package building occurs once
+ ninit int32 // number of init functions
+ info *types.Info // package type information
+ files []*ast.File // package ASTs
+}
+
+// A Member is a member of a Go package, implemented by *NamedConst,
+// *Global, *Function, or *Type; they are created by package-level
+// const, var, func and type declarations respectively.
+//
+type Member interface {
+ Name() string // declared name of the package member
+ String() string // package-qualified name of the package member
+ RelString(*types.Package) string // like String, but relative refs are unqualified
+ Object() types.Object // typechecker's object for this member, if any
+ Pos() token.Pos // position of member's declaration, if known
+ Type() types.Type // type of the package member
+ Token() token.Token // token.{VAR,FUNC,CONST,TYPE}
+ Package() *Package // the containing package
+}
+
+// A Type is a Member of a Package representing a package-level named type.
+type Type struct {
+ object *types.TypeName
+ pkg *Package
+}
+
+// A NamedConst is a Member of a Package representing a package-level
+// named constant.
+//
+// Pos() returns the position of the declaring ast.ValueSpec.Names[*]
+// identifier.
+//
+// NB: a NamedConst is not a Value; it contains a constant Value, which
+// it augments with the name and position of its 'const' declaration.
+//
+type NamedConst struct {
+ object *types.Const
+ Value *Const
+ pkg *Package
+}
+
+// A Value is an SSA value that can be referenced by an instruction.
+type Value interface {
+ // Name returns the name of this value, and determines how
+ // this Value appears when used as an operand of an
+ // Instruction.
+ //
+ // This is the same as the source name for Parameters,
+ // Builtins, Functions, FreeVars, Globals.
+ // For constants, it is a representation of the constant's value
+ // and type. For all other Values this is the name of the
+ // virtual register defined by the instruction.
+ //
+ // The name of an SSA Value is not semantically significant,
+ // and may not even be unique within a function.
+ Name() string
+
+ // If this value is an Instruction, String returns its
+ // disassembled form; otherwise it returns unspecified
+ // human-readable information about the Value, such as its
+ // kind, name and type.
+ String() string
+
+ // Type returns the type of this value. Many instructions
+ // (e.g. IndexAddr) change their behaviour depending on the
+ // types of their operands.
+ Type() types.Type
+
+ // Parent returns the function to which this Value belongs.
+ // It returns nil for named Functions, Builtin, Const and Global.
+ Parent() *Function
+
+ // Referrers returns the list of instructions that have this
+ // value as one of their operands; it may contain duplicates
+ // if an instruction has a repeated operand.
+ //
+ // Referrers actually returns a pointer through which the
+ // caller may perform mutations to the object's state.
+ //
+ // Referrers is currently only defined if Parent()!=nil,
+ // i.e. for the function-local values FreeVar, Parameter,
+ // Functions (iff anonymous) and all value-defining instructions.
+ // It returns nil for named Functions, Builtin, Const and Global.
+ //
+ // Instruction.Operands contains the inverse of this relation.
+ Referrers() *[]Instruction
+
+ // Pos returns the location of the AST token most closely
+ // associated with the operation that gave rise to this value,
+ // or token.NoPos if it was not explicit in the source.
+ //
+ // For each ast.Node type, a particular token is designated as
+ // the closest location for the expression, e.g. the Lparen
+ // for an *ast.CallExpr. This permits a compact but
+ // approximate mapping from Values to source positions for use
+ // in diagnostic messages, for example.
+ //
+ // (Do not use this position to determine which Value
+ // corresponds to an ast.Expr; use Function.ValueForExpr
+ // instead. NB: it requires that the function was built with
+ // debug information.)
+ Pos() token.Pos
+}
+
+// An Instruction is an SSA instruction that computes a new Value or
+// has some effect.
+//
+// An Instruction that defines a value (e.g. BinOp) also implements
+// the Value interface; an Instruction that only has an effect (e.g. Store)
+// does not.
+//
+type Instruction interface {
+ // String returns the disassembled form of this value.
+ //
+ // Examples of Instructions that are Values:
+ // "x + y" (BinOp)
+ // "len([])" (Call)
+ // Note that the name of the Value is not printed.
+ //
+ // Examples of Instructions that are not Values:
+ // "return x" (Return)
+ // "*y = x" (Store)
+ //
+ // (The separation Value.Name() from Value.String() is useful
+ // for some analyses which distinguish the operation from the
+ // value it defines, e.g., 'y = local int' is both an allocation
+ // of memory 'local int' and a definition of a pointer y.)
+ String() string
+
+ // Parent returns the function to which this instruction
+ // belongs.
+ Parent() *Function
+
+ // Block returns the basic block to which this instruction
+ // belongs.
+ Block() *BasicBlock
+
+ // setBlock sets the basic block to which this instruction belongs.
+ setBlock(*BasicBlock)
+
+ // Operands returns the operands of this instruction: the
+ // set of Values it references.
+ //
+ // Specifically, it appends their addresses to rands, a
+ // user-provided slice, and returns the resulting slice,
+ // permitting avoidance of memory allocation.
+ //
+ // The operands are appended in undefined order, but the order
+ // is consistent for a given Instruction; the addresses are
+ // always non-nil but may point to a nil Value. Clients may
+ // store through the pointers, e.g. to effect a value
+ // renaming.
+ //
+ // Value.Referrers is a subset of the inverse of this
+ // relation. (Referrers are not tracked for all types of
+ // Values.)
+ Operands(rands []*Value) []*Value
+
+ // Pos returns the location of the AST token most closely
+ // associated with the operation that gave rise to this
+ // instruction, or token.NoPos if it was not explicit in the
+ // source.
+ //
+ // For each ast.Node type, a particular token is designated as
+ // the closest location for the expression, e.g. the Go token
+ // for an *ast.GoStmt. This permits a compact but approximate
+ // mapping from Instructions to source positions for use in
+ // diagnostic messages, for example.
+ //
+ // (Do not use this position to determine which Instruction
+ // corresponds to an ast.Expr; see the notes for Value.Pos.
+ // This position may be used to determine which non-Value
+ // Instruction corresponds to some ast.Stmts, but not all: If
+ // and Jump instructions have no Pos(), for example.)
+ Pos() token.Pos
+}
+
+// A Node is a node in the SSA value graph. Every concrete type that
+// implements Node is also either a Value, an Instruction, or both.
+//
+// Node contains the methods common to Value and Instruction, plus the
+// Operands and Referrers methods generalized to return nil for
+// non-Instructions and non-Values, respectively.
+//
+// Node is provided to simplify SSA graph algorithms. Clients should
+// use the more specific and informative Value or Instruction
+// interfaces where appropriate.
+//
+type Node interface {
+ // Common methods:
+ String() string
+ Pos() token.Pos
+ Parent() *Function
+
+ // Partial methods:
+ Operands(rands []*Value) []*Value // nil for non-Instructions
+ Referrers() *[]Instruction // nil for non-Values
+}
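+
+// A minimal sketch of how a client might walk the operands of any Node
+// generically; the helper name visitOperands is illustrative only:
+//
+//	func visitOperands(n Node) {
+//		for _, rand := range n.Operands(nil) {
+//			if v := *rand; v != nil {
+//				_ = v // inspect each non-nil operand Value
+//			}
+//		}
+//	}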
+
+// Function represents the parameters, results, and code of a function
+// or method.
+//
+// If Blocks is nil, this indicates an external function for which no
+// Go source code is available. In this case, FreeVars and Locals
+// are nil too. Clients performing whole-program analysis must
+// handle external functions specially.
+//
+// Blocks contains the function's control-flow graph (CFG).
+// Blocks[0] is the function entry point; block order is not otherwise
+// semantically significant, though it may affect the readability of
+// the disassembly.
+// To iterate over the blocks in dominance order, use DomPreorder().
+//
+// Recover is an optional second entry point to which control resumes
+// after a recovered panic. The Recover block may contain only a return
+// statement, preceded by a load of the function's named return
+// parameters, if any.
+//
+// A nested function (Parent()!=nil) that refers to one or more
+// lexically enclosing local variables ("free variables") has FreeVars.
+// Such functions cannot be called directly but require a
+// value created by MakeClosure which, via its Bindings, supplies
+// values for these parameters.
+//
+// If the function is a method (Signature.Recv() != nil) then the first
+// element of Params is the receiver parameter.
+//
+// A Go package may declare many functions called "init".
+// For each one, Object().Name() returns "init" but Name() returns
+// "init#1", etc, in declaration order.
+//
+// Pos() returns the declaring ast.FuncLit.Type.Func or the position
+// of the ast.FuncDecl.Name, if the function was explicit in the
+// source. Synthetic wrappers, for which Synthetic != "", may share
+// the same position as the function they wrap.
+// Syntax.Pos() always returns the position of the declaring "func" token.
+//
+// Type() returns the function's Signature.
+//
+type Function struct {
+ name string
+ object types.Object // a declared *types.Func or one of its wrappers
+ method *types.Selection // info about provenance of synthetic methods
+ Signature *types.Signature
+ pos token.Pos
+
+ Synthetic string // provenance of synthetic function; "" for true source functions
+ syntax ast.Node // *ast.Func{Decl,Lit}; replaced with simple ast.Node after build, unless debug mode
+ parent *Function // enclosing function if anon; nil if global
+ Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error)
+ Prog *Program // enclosing program
+ Params []*Parameter // function parameters; for methods, includes receiver
+ FreeVars []*FreeVar // free variables whose values must be supplied by closure
+ Locals []*Alloc // local variables of this function
+ Blocks []*BasicBlock // basic blocks of the function; nil => external
+ Recover *BasicBlock // optional; control transfers here after recovered panic
+ AnonFuncs []*Function // anonymous functions directly beneath this one
+ referrers []Instruction // referring instructions (iff Parent() != nil)
+
+ // The following fields are set transiently during building,
+ // then cleared.
+ currentBlock *BasicBlock // where to emit code
+ objects map[types.Object]Value // addresses of local variables
+ namedResults []*Alloc // tuple of named results
+ targets *targets // linked stack of branch targets
+ lblocks map[*ast.Object]*lblock // labelled blocks
+}
+
+// BasicBlock represents an SSA basic block.
+//
+// The final element of Instrs is always an explicit transfer of
+// control (If, Jump, Return, or Panic).
+//
+// A block may contain no Instructions only if it is unreachable,
+// i.e., Preds is nil. Empty blocks are typically pruned.
+//
+// BasicBlocks and their Preds/Succs relation form a (possibly cyclic)
+// graph independent of the SSA Value graph: the control-flow graph or
+// CFG. It is illegal for multiple edges to exist between the same
+// pair of blocks.
+//
+// Each BasicBlock is also a node in the dominator tree of the CFG.
+// The tree may be navigated using Idom()/Dominees() and queried using
+// Dominates().
+//
+// The order of Preds and Succs is significant (to Phi and If
+// instructions, respectively).
+//
+type BasicBlock struct {
+ Index int // index of this block within Parent().Blocks
+ Comment string // optional label; no semantic significance
+ parent *Function // parent function
+ Instrs []Instruction // instructions in order
+ Preds, Succs []*BasicBlock // predecessors and successors
+ succs2 [2]*BasicBlock // initial space for Succs
+ dom domInfo // dominator tree info
+ gaps int // number of nil Instrs (transient)
+ rundefers int // number of rundefers (transient)
+}
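+
+// A minimal sketch of a CFG traversal in dominance order, as suggested
+// above; fn is assumed to be a *Function whose Blocks are non-nil:
+//
+//	for _, b := range fn.DomPreorder() {
+//		for _, instr := range b.Instrs {
+//			_ = instr // visit each Instruction in block order
+//		}
+//	}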
+
+// Pure values ----------------------------------------
+
+// A FreeVar represents a free variable of the function to which it
+// belongs.
+//
+// FreeVars are used to implement anonymous functions, whose free
+// variables are lexically captured in a closure formed by
+// MakeClosure. The value of such a free var is an Alloc or another
+// FreeVar and is considered a potentially escaping heap address, with
+// pointer type.
+//
+// FreeVars are also used to implement bound method closures. Such a
+// free var represents the receiver value and may be of any type that
+// has concrete methods.
+//
+// Pos() returns the position of the value that was captured, which
+// belongs to an enclosing function.
+//
+type FreeVar struct {
+ name string
+ typ types.Type
+ pos token.Pos
+ parent *Function
+ referrers []Instruction
+
+ // Transiently needed during building.
+ outer Value // the Value captured from the enclosing context.
+}
+
+// A Parameter represents an input parameter of a function.
+//
+type Parameter struct {
+ name string
+ object types.Object // a *types.Var; nil for non-source locals
+ typ types.Type
+ pos token.Pos
+ parent *Function
+ referrers []Instruction
+}
+
+// A Const represents the value of a constant expression.
+//
+// The underlying type of a constant may be any boolean, numeric, or
+// string type. In addition, a Const may represent the nil value of
+// any reference type---interface, map, channel, pointer, slice, or
+// function---but not "untyped nil".
+//
+// All source-level constant expressions are represented by a Const
+// of the same type and value.
+//
+// Value holds the exact value of the constant, independent of its
+// Type(), using the same representation as package go/exact uses for
+// constants, or nil for a typed nil value.
+//
+// Pos() returns token.NoPos.
+//
+// Example printed form:
+// 42:int
+// "hello":untyped string
+// 3+4i:MyComplex
+//
+type Const struct {
+ typ types.Type
+ Value exact.Value
+}
+
+// A Global is a named Value holding the address of a package-level
+// variable.
+//
+// Pos() returns the position of the ast.ValueSpec.Names[*]
+// identifier.
+//
+type Global struct {
+ name string
+ object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard
+ typ types.Type
+ pos token.Pos
+
+ Pkg *Package
+}
+
+// A Builtin represents a specific use of a built-in function, e.g. len.
+//
+// Builtins are immutable values. Builtins do not have addresses.
+// Builtins can only appear in CallCommon.Func.
+//
+// Name() indicates the function: one of the built-in functions from the
+// Go spec (excluding "make" and "new") or one of these ssa-defined
+// intrinsics:
+//
+// // wrapnilchk returns ptr if non-nil, panics otherwise.
+// // (For use in indirection wrappers.)
+// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T
+//
+// Object() returns a *types.Builtin for built-ins defined by the spec,
+// nil for others.
+//
+// Type() returns a *types.Signature representing the effective
+// signature of the built-in for this call.
+//
+type Builtin struct {
+ name string
+ sig *types.Signature
+}
+
+// Value-defining instructions ----------------------------------------
+
+// The Alloc instruction reserves space for a variable of the given type,
+// zero-initializes it, and yields its address.
+//
+// Alloc values are always addresses, and have pointer types, so the
+// type of the allocated variable is actually
+// Type().Underlying().(*types.Pointer).Elem().
+//
+// If Heap is false, Alloc allocates space in the function's
+// activation record (frame); we refer to an Alloc(Heap=false) as a
+// "local" alloc. Each local Alloc returns the same address each time
+// it is executed within the same activation; the space is
+// re-initialized to zero.
+//
+// If Heap is true, Alloc allocates space in the heap; we
+// refer to an Alloc(Heap=true) as a "new" alloc. Each new Alloc
+// returns a different address each time it is executed.
+//
+// When Alloc is applied to a channel, map or slice type, it returns
+// the address of an uninitialized (nil) reference of that kind; store
+// the result of MakeSlice, MakeMap or MakeChan in that location to
+// instantiate these types.
+//
+// Pos() returns the ast.CompositeLit.Lbrace for a composite literal,
+// or the ast.CallExpr.Rparen for a call to new() or for a call that
+// allocates a varargs slice.
+//
+// Example printed form:
+// t0 = local int
+// t1 = new int
+//
+type Alloc struct {
+ register
+ Comment string
+ Heap bool
+ index int // dense numbering; for lifting
+}
+
+// The Phi instruction represents an SSA φ-node, which combines values
+// that differ across incoming control-flow edges and yields a new
+// value. Within a block, all φ-nodes must appear before all non-φ
+// nodes.
+//
+// Pos() returns the position of the && or || for short-circuit
+// control-flow joins, or that of the *Alloc for φ-nodes inserted
+// during SSA renaming.
+//
+// Example printed form:
+// t2 = phi [0: t0, 1: t1]
+//
+type Phi struct {
+ register
+ Comment string // a hint as to its purpose
+ Edges []Value // Edges[i] is value for Block().Preds[i]
+}
+
+// The Call instruction represents a function or method call.
+//
+// The Call instruction yields the function result if there is exactly
+// one. Otherwise it returns a tuple, the components of which are
+// accessed via Extract.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.CallExpr.Lparen, if explicit in the source.
+//
+// Example printed form:
+// t2 = println(t0, t1)
+// t4 = t3()
+// t7 = invoke t5.Println(...t6)
+//
+type Call struct {
+ register
+ Call CallCommon
+}
+
+// The BinOp instruction yields the result of binary operation X Op Y.
+//
+// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source.
+//
+// Example printed form:
+// t1 = t0 + 1:int
+//
+type BinOp struct {
+ register
+ // One of:
+ // ADD SUB MUL QUO REM + - * / %
+ // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^
+ // EQL LSS GTR NEQ LEQ GEQ == < > != <= >=
+ Op token.Token
+ X, Y Value
+}
+
+// The UnOp instruction yields the result of Op X.
+// ARROW is channel receive.
+// MUL is pointer indirection (load).
+// XOR is bitwise complement.
+// SUB is negation.
+// NOT is logical negation.
+//
+// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above
+// and a boolean indicating the success of the receive. The
+// components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source.
+// For receive operations (ARROW) implicit in ranging over a channel,
+// Pos() returns the ast.RangeStmt.For.
+// For implicit memory loads (STAR), Pos() returns the position of the
+// most closely associated source-level construct; the details are not
+// specified.
+//
+// Example printed form:
+// t0 = *x
+// t2 = <-t1,ok
+//
+type UnOp struct {
+ register
+ Op token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^
+ X Value
+ CommaOk bool
+}
+
+// The ChangeType instruction applies to X a value-preserving type
+// change to Type().
+//
+// Type changes are permitted:
+// - between a named type and its underlying type.
+// - between two named types of the same underlying type.
+// - between (possibly named) pointers to identical base types.
+// - from a bidirectional channel to a read- or write-channel,
+// optionally adding/removing a name.
+//
+// This operation cannot fail dynamically.
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+// t1 = changetype *int <- IntPtr (t0)
+//
+type ChangeType struct {
+ register
+ X Value
+}
+
+// The Convert instruction yields the conversion of value X to type
+// Type(). One or both of those types is basic (but possibly named).
+//
+// A conversion may change the value and representation of its operand.
+// Conversions are permitted:
+// - between real numeric types.
+// - between complex numeric types.
+// - between string and []byte or []rune.
+// - between pointers and unsafe.Pointer.
+// - between unsafe.Pointer and uintptr.
+// - from (Unicode) integer to (UTF-8) string.
+// A conversion may imply a type name change also.
+//
+// This operation cannot fail dynamically.
+//
+// Conversions of untyped string/number/bool constants to a specific
+// representation are eliminated during SSA construction.
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+// t1 = convert []byte <- string (t0)
+//
+type Convert struct {
+ register
+ X Value
+}
+
+// ChangeInterface constructs a value of one interface type from a
+// value of another interface type known to be assignable to it.
+// This operation cannot fail.
+//
+// Pos() returns the ast.CallExpr.Lparen if the instruction arose from
+// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the
+// instruction arose from an explicit e.(T) operation; or token.NoPos
+// otherwise.
+//
+// Example printed form:
+// t1 = change interface interface{} <- I (t0)
+//
+type ChangeInterface struct {
+ register
+ X Value
+}
+
+// MakeInterface constructs an instance of an interface type from a
+// value of a concrete type.
+//
+// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set
+// of X, and Program.Method(m) to find the implementation of a method.
+//
+// To construct the zero value of an interface type T, use:
+// NewConst(exact.MakeNil(), T, pos)
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+// t1 = make interface{} <- int (42:int)
+// t2 = make Stringer <- t0
+//
+type MakeInterface struct {
+ register
+ X Value
+}
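+
+// A minimal sketch of the method-set lookup described above; prog is an
+// assumed *Program and mi an assumed *MakeInterface instruction:
+//
+//	mset := prog.MethodSets.MethodSet(mi.X.Type())
+//	for i := 0; i < mset.Len(); i++ {
+//		fn := prog.Method(mset.At(i)) // *Function implementing mset.At(i)
+//		_ = fn
+//	}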
+
+// The MakeClosure instruction yields a closure value whose code is
+// Fn and whose free variables' values are supplied by Bindings.
+//
+// Type() returns a (possibly named) *types.Signature.
+//
+// Pos() returns the ast.FuncLit.Type.Func for a function literal
+// closure or the ast.SelectorExpr.Sel for a bound method closure.
+//
+// Example printed form:
+// t0 = make closure anon@1.2 [x y z]
+// t1 = make closure bound$(main.I).add [i]
+//
+type MakeClosure struct {
+ register
+ Fn Value // always a *Function
+ Bindings []Value // values for each free variable in Fn.FreeVars
+}
+
+// The MakeMap instruction creates a new hash-table-based map object
+// and yields a value of kind map.
+//
+// Type() returns a (possibly named) *types.Map.
+//
+// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or
+// the ast.CompositeLit.Lbrace if created by a literal.
+//
+// Example printed form:
+// t1 = make map[string]int t0
+// t1 = make StringIntMap t0
+//
+type MakeMap struct {
+ register
+ Reserve Value // initial space reservation; nil => default
+}
+
+// The MakeChan instruction creates a new channel object and yields a
+// value of kind chan.
+//
+// Type() returns a (possibly named) *types.Chan.
+//
+// Pos() returns the ast.CallExpr.Lparen for the make(chan) that
+// created it.
+//
+// Example printed form:
+// t0 = make chan int 0
+// t0 = make IntChan 0
+//
+type MakeChan struct {
+ register
+ Size Value // int; size of buffer; zero => synchronous.
+}
+
+// The MakeSlice instruction yields a slice of length Len backed by a
+// newly allocated array of length Cap.
+//
+// Both Len and Cap must be non-nil Values of integer type.
+//
+// (Alloc(types.Array) followed by Slice will not suffice because
+// Alloc can only create arrays of constant length.)
+//
+// Type() returns a (possibly named) *types.Slice.
+//
+// Pos() returns the ast.CallExpr.Lparen for the make([]T) that
+// created it.
+//
+// Example printed form:
+// t1 = make []string 1:int t0
+// t1 = make StringSlice 1:int t0
+//
+type MakeSlice struct {
+ register
+ Len Value
+ Cap Value
+}
+
+// The Slice instruction yields a slice of an existing string, slice
+// or *array X between optional integer bounds Low and High.
+//
+// Dynamically, this instruction panics if X evaluates to a nil *array
+// pointer.
+//
+// Type() returns string if the type of X was string, otherwise a
+// *types.Slice with the same element type as X.
+//
+// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice
+// operation, the ast.CompositeLit.Lbrace if created by a literal, or
+// NoPos if not explicit in the source (e.g. a variadic argument slice).
+//
+// Example printed form:
+// t1 = slice t0[1:]
+//
+type Slice struct {
+ register
+ X Value // slice, string, or *array
+ Low, High, Max Value // each may be nil
+}
+
+// The FieldAddr instruction yields the address of Field of *struct X.
+//
+// The field is identified by its index within the field list of the
+// struct type of X.
+//
+// Dynamically, this instruction panics if X evaluates to a nil
+// pointer.
+//
+// Type() returns a (possibly named) *types.Pointer.
+//
+// Pos() returns the position of the ast.SelectorExpr.Sel for the
+// field, if explicit in the source.
+//
+// Example printed form:
+// t1 = &t0.name [#1]
+//
+type FieldAddr struct {
+ register
+ X Value // *struct
+ Field int // index into X.Type().Deref().(*types.Struct).Fields
+}
+
+// The Field instruction yields the Field of struct X.
+//
+// The field is identified by its index within the field list of the
+// struct type of X; by using numeric indices we avoid ambiguity of
+// package-local identifiers and permit compact representations.
+//
+// Pos() returns the position of the ast.SelectorExpr.Sel for the
+// field, if explicit in the source.
+//
+// Example printed form:
+// t1 = t0.name [#1]
+//
+type Field struct {
+ register
+ X Value // struct
+ Field int // index into X.Type().(*types.Struct).Fields
+}
+
+// The IndexAddr instruction yields the address of the element at
+// index Index of collection X. Index is an integer expression.
+//
+// The elements of maps and strings are not addressable; use Lookup or
+// MapUpdate instead.
+//
+// Dynamically, this instruction panics if X evaluates to a nil *array
+// pointer.
+//
+// Type() returns a (possibly named) *types.Pointer.
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+// t2 = &t0[t1]
+//
+type IndexAddr struct {
+ register
+ X Value // slice or *array
+ Index Value // numeric index
+}
+
+// The Index instruction yields element Index of array X.
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+// t2 = t0[t1]
+//
+type Index struct {
+ register
+ X Value // array
+ Index Value // integer index
+}
+
+// The Lookup instruction yields element Index of collection X, a map
+// or string. Index is an integer expression if X is a string or the
+// appropriate key type if X is a map.
+//
+// If CommaOk, the result is a 2-tuple of the value above and a
+// boolean indicating the result of a map membership test for the key.
+// The components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source.
+//
+// Example printed form:
+// t2 = t0[t1]
+// t5 = t3[t4],ok
+//
+type Lookup struct {
+ register
+ X Value // string or map
+ Index Value // numeric or key-typed index
+ CommaOk bool // return a value,ok pair
+}
+
+// SelectState is a helper for Select.
+// It represents one goal state and its corresponding communication.
+//
+type SelectState struct {
+ Dir types.ChanDir // direction of case (SendOnly or RecvOnly)
+ Chan Value // channel to use (for send or receive)
+ Send Value // value to send (for send)
+ Pos token.Pos // position of token.ARROW
+ DebugNode ast.Node // ast.SendStmt or ast.UnaryExpr(<-) [debug mode]
+}
+
+// The Select instruction tests whether (or blocks until) one
+// of the specified sent or received states is entered.
+//
+// Let n be the number of States for which Dir==RECV and T_i (0<=i<n)
+// be the element type of each such state's Chan.
+// Select returns an n+2-tuple
+// (index int, recvOk bool, r_0 T_0, ... r_n-1 T_n-1)
+// The tuple's components, described below, must be accessed via the
+// Extract instruction.
+//
+// If Blocking, select waits until exactly one state holds, i.e. a
+// channel becomes ready for the designated operation of sending or
+// receiving; select chooses one among the ready states
+// pseudorandomly, performs the send or receive operation, and sets
+// 'index' to the index of the chosen channel.
+//
+// If !Blocking, select doesn't block if no states hold; instead it
+// returns immediately with index equal to -1.
+//
+// If the chosen channel was used for a receive, the r_i component is
+// set to the received value, where i is the index of that state among
+// all n receive states; otherwise r_i has the zero value of type T_i.
+// Note that the receive index i is not the same as the state
+// index 'index'.
+//
+// The second component of the tuple, recvOk, is a boolean whose value
+// is true iff the selected operation was a receive and the receive
+// successfully yielded a value.
+//
+// Pos() returns the ast.SelectStmt.Select.
+//
+// Example printed form:
+// t3 = select nonblocking [<-t0, t1<-t2]
+// t4 = select blocking []
+//
+type Select struct {
+ register
+ States []*SelectState
+ Blocking bool
+}
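+
+// A minimal sketch of recovering the components of a Select's result
+// tuple; sel is an assumed *Select within a built function:
+//
+//	for _, instr := range *sel.Referrers() {
+//		if ext, ok := instr.(*Extract); ok {
+//			switch ext.Index {
+//			case 0: // the 'index' component
+//			case 1: // the 'recvOk' component
+//			default: // r_(Index-2), a received value
+//			}
+//		}
+//	}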
+
+// The Range instruction yields an iterator over the domain and range
+// of X, which must be a string or map.
+//
+// Elements are accessed via Next.
+//
+// Type() returns an opaque and degenerate "rangeIter" type.
+//
+// Pos() returns the ast.RangeStmt.For.
+//
+// Example printed form:
+// t0 = range "hello":string
+//
+type Range struct {
+ register
+ X Value // string or map
+}
+
+// The Next instruction reads and advances the (map or string)
+// iterator Iter and returns a 3-tuple value (ok, k, v). If the
+// iterator is not exhausted, ok is true and k and v are the next
+// elements of the domain and range, respectively. Otherwise ok is
+// false and k and v are undefined.
+//
+// Components of the tuple are accessed using Extract.
+//
+// The IsString field distinguishes iterators over strings from those
+// over maps, as the Type() alone is insufficient: consider
+// map[int]rune.
+//
+// Type() returns a *types.Tuple for the triple (ok, k, v).
+// The types of k and/or v may be types.Invalid.
+//
+// Example printed form:
+// t1 = next t0
+//
+type Next struct {
+ register
+ Iter Value
+ IsString bool // true => string iterator; false => map iterator.
+}
+
+// The TypeAssert instruction tests whether interface value X has type
+// AssertedType.
+//
+// If !CommaOk, on success it returns v, the result of the conversion
+// (defined below); on failure it panics.
+//
+// If CommaOk: on success it returns a pair (v, true) where v is the
+// result of the conversion; on failure it returns (z, false) where z
+// is AssertedType's zero value. The components of the pair must be
+// accessed using the Extract instruction.
+//
+// If AssertedType is a concrete type, TypeAssert checks whether the
+// dynamic type in interface X is equal to it, and if so, the result
+// of the conversion is a copy of the value in the interface.
+//
+// If AssertedType is an interface, TypeAssert checks whether the
+// dynamic type of the interface is assignable to it, and if so, the
+// result of the conversion is a copy of the interface value X.
+// If AssertedType is a superinterface of X.Type(), the operation will
+// fail iff the operand is nil. (Contrast with ChangeInterface, which
+// performs no nil-check.)
+//
+// Type() reflects the actual type of the result, possibly a
+// 2-types.Tuple; AssertedType is the asserted type.
+//
+// Pos() returns the ast.CallExpr.Lparen if the instruction arose from
+// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the
+// instruction arose from an explicit e.(T) operation; or the
+// ast.CaseClause.Case if the instruction arose from a case of a
+// type-switch statement.
+//
+// Example printed form:
+// t1 = typeassert t0.(int)
+// t3 = typeassert,ok t2.(T)
+//
+type TypeAssert struct {
+ register
+ X Value
+ AssertedType types.Type
+ CommaOk bool
+}
+
+// The Extract instruction yields component Index of Tuple.
+//
+// This is used to access the results of instructions with multiple
+// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and
+// IndexExpr(Map).
+//
+// Example printed form:
+// t1 = extract t0 #1
+//
+type Extract struct {
+ register
+ Tuple Value
+ Index int
+}
+
+// Instructions executed for effect. They do not yield a value. --------------------
+
+// The Jump instruction transfers control to the sole successor of its
+// owning block.
+//
+// A Jump must be the last instruction of its containing BasicBlock.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+// jump done
+//
+type Jump struct {
+ anInstruction
+}
+
+// The If instruction transfers control to one of the two successors
+// of its owning block, depending on the boolean Cond: the first if
+// true, the second if false.
+//
+// An If instruction must be the last instruction of its containing
+// BasicBlock.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+// if t0 goto done else body
+//
+type If struct {
+ anInstruction
+ Cond Value
+}
+
+// The Return instruction returns values and control back to the calling
+// function.
+//
+// len(Results) is always equal to the number of results in the
+// function's signature.
+//
+// If len(Results) > 1, Return returns a tuple value with the specified
+// components which the caller must access using Extract instructions.
+//
+// There is no instruction to return a ready-made tuple like those
+// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or
+// a tail-call to a function with multiple result parameters.
+//
+// Return must be the last instruction of its containing BasicBlock.
+// Such a block has no successors.
+//
+// Pos() returns the ast.ReturnStmt.Return, if explicit in the source.
+//
+// Example printed form:
+// return
+// return nil:I, 2:int
+//
+type Return struct {
+ anInstruction
+ Results []Value
+ pos token.Pos
+}
+
+// The RunDefers instruction pops and invokes the entire stack of
+// procedure calls pushed by Defer instructions in this function.
+//
+// It is legal to encounter multiple 'rundefers' instructions in a
+// single control-flow path through a function; this is useful in
+// the combined init() function, for example.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+// rundefers
+//
+type RunDefers struct {
+ anInstruction
+}
+
+// The Panic instruction initiates a panic with value X.
+//
+// A Panic instruction must be the last instruction of its containing
+// BasicBlock, which must have no successors.
+//
+// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction;
+// they are treated as calls to a built-in function.
+//
+// Pos() returns the ast.CallExpr.Lparen if this panic was explicit
+// in the source.
+//
+// Example printed form:
+// panic t0
+//
+type Panic struct {
+ anInstruction
+ X Value // an interface{}
+ pos token.Pos
+}
+
+// The Go instruction creates a new goroutine and calls the specified
+// function within it.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.GoStmt.Go.
+//
+// Example printed form:
+// go println(t0, t1)
+// go t3()
+// go invoke t5.Println(...t6)
+//
+type Go struct {
+ anInstruction
+ Call CallCommon
+ pos token.Pos
+}
+
+// The Defer instruction pushes the specified call onto a stack of
+// functions to be called by a RunDefers instruction or by a panic.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.DeferStmt.Defer.
+//
+// Example printed form:
+// defer println(t0, t1)
+// defer t3()
+// defer invoke t5.Println(...t6)
+//
+type Defer struct {
+ anInstruction
+ Call CallCommon
+ pos token.Pos
+}
+
+// The Send instruction sends X on channel Chan.
+//
+// Pos() returns the ast.SendStmt.Arrow, if explicit in the source.
+//
+// Example printed form:
+// send t0 <- t1
+//
+type Send struct {
+ anInstruction
+ Chan, X Value
+ pos token.Pos
+}
+
+// The Store instruction stores Val at address Addr.
+// Stores can be of arbitrary types.
+//
+// Pos() returns the position of the source-level construct most closely
+// associated with the memory store operation.
+// Since implicit memory stores are numerous and varied and depend upon
+// implementation choices, the details are not specified.
+//
+// Example printed form:
+// *x = y
+//
+type Store struct {
+ anInstruction
+ Addr Value
+ Val Value
+ pos token.Pos
+}
+
+// The MapUpdate instruction updates the association of Map[Key] to
+// Value.
+//
+// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack,
+// if explicit in the source.
+//
+// Example printed form:
+// t0[t1] = t2
+//
+type MapUpdate struct {
+ anInstruction
+ Map Value
+ Key Value
+ Value Value
+ pos token.Pos
+}
+
+// A DebugRef instruction maps a source-level expression Expr to the
+// SSA value X that represents the value (!IsAddr) or address (IsAddr)
+// of that expression.
+//
+// DebugRef is a pseudo-instruction: it has no dynamic effect.
+//
+// Pos() returns Expr.Pos(), the start position of the source-level
+// expression. This is not the same as the "designated" token as
+// documented at Value.Pos(); e.g. CallExpr.Pos() does not return the
+// position of the ("designated") Lparen token.
+//
+// If Expr is an *ast.Ident denoting a var or func, Object() returns
+// the object; though this information can be obtained from the type
+// checker, including it here greatly facilitates debugging.
+// For non-Ident expressions, Object() returns nil.
+//
+// DebugRefs are generated only for functions built with debugging
+// enabled; see Package.SetDebugMode() and the GlobalDebug builder
+// mode flag.
+//
+// DebugRefs are not emitted for ast.Idents referring to constants or
+// predeclared identifiers, since they are trivial and numerous.
+// Nor are they emitted for ast.ParenExprs.
+//
+// (By representing these as instructions, rather than out-of-band,
+// consistency is maintained during transformation passes by the
+// ordinary SSA renaming machinery.)
+//
+// Example printed form:
+// ; *ast.CallExpr @ 102:9 is t5
+// ; var x float64 @ 109:72 is x
+// ; address of *ast.CompositeLit @ 216:10 is t0
+//
+type DebugRef struct {
+ anInstruction
+ Expr ast.Expr // the referring expression (never *ast.ParenExpr)
+ object types.Object // the identity of the source var/func
+ IsAddr bool // Expr is addressable and X is the address it denotes
+ X Value // the value or address of Expr
+}
+
+// Embeddable mix-ins and helpers for common parts of other structs. -----------
+
+// register is a mix-in embedded by all SSA values that are also
+// instructions, i.e. virtual registers, and provides a uniform
+// implementation of most of the Value interface: Value.Name() is a
+// numbered register (e.g. "t0"); the other methods are field accessors.
+//
+// Temporary names are automatically assigned to each register on
+// completion of building a function in SSA form.
+//
+// Clients must not assume that the 'num' value (and the Name() derived
+// from it) is unique within a function. As always in this API,
+// semantics are determined only by identity; names exist only to
+// facilitate debugging.
+//
+type register struct {
+ anInstruction
+ num int // "name" of virtual register, e.g. "t0". Not guaranteed unique.
+ typ types.Type // type of virtual register
+ pos token.Pos // position of source expression, or NoPos
+ referrers []Instruction
+}
+
+// anInstruction is a mix-in embedded by all Instructions.
+// It provides the implementations of the Block and setBlock methods.
+type anInstruction struct {
+ block *BasicBlock // the basic block of this instruction
+}
+
+// CallCommon is contained by Go, Defer and Call to hold the
+// common parts of a function or method call.
+//
+// Each CallCommon exists in one of two modes, function call and
+// interface method invocation, or "call" and "invoke" for short.
+//
+// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon
+// represents an ordinary function call of the value in Value,
+// which may be a *Builtin, a *Function or any other value of kind
+// 'func'.
+//
+// Value may be one of:
+// (a) a *Function, indicating a statically dispatched call
+// to a package-level function, an anonymous function, or
+// a method of a named type.
+// (b) a *MakeClosure, indicating an immediately applied
+// function literal with free variables.
+// (c) a *Builtin, indicating a statically dispatched call
+// to a built-in function.
+// (d) any other value, indicating a dynamically dispatched
+// function call.
+// StaticCallee returns the identity of the callee in cases
+// (a) and (b), nil otherwise.
+//
+// Args contains the arguments to the call. If Value is a method,
+// Args[0] contains the receiver parameter.
+//
+// Example printed form:
+// t2 = println(t0, t1)
+// go t3()
+// defer t5(...t6)
+//
+// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon
+// represents a dynamically dispatched call to an interface method.
+// In this mode, Value is the interface value and Method is the
+// interface's abstract method. Note: an abstract method may be
+// shared by multiple interfaces due to embedding; Value.Type()
+// provides the specific interface used for this call.
+//
+// Value is implicitly supplied to the concrete method implementation
+// as the receiver parameter; in other words, Args[0] holds not the
+// receiver but the first true argument.
+//
+// Example printed form:
+// t1 = invoke t0.String()
+// go invoke t3.Run(t2)
+// defer invoke t4.Handle(...t5)
+//
+// For all calls to variadic functions (Signature().Variadic()),
+// the last element of Args is a slice.
+//
+type CallCommon struct {
+ Value Value // receiver (invoke mode) or func value (call mode)
+ Method *types.Func // abstract method (invoke mode)
+ Args []Value // actual parameters (in static method call, includes receiver)
+ pos token.Pos // position of CallExpr.Lparen, iff explicit in source
+}
+
+// IsInvoke returns true if this call has "invoke" (not "call") mode.
+func (c *CallCommon) IsInvoke() bool {
+ return c.Method != nil
+}
+
+func (c *CallCommon) Pos() token.Pos { return c.pos }
+
+// Signature returns the signature of the called function.
+//
+// For an "invoke"-mode call, the signature of the interface method is
+// returned.
+//
+// In either "call" or "invoke" mode, if the callee is a method, its
+// receiver is represented by sig.Recv(), not sig.Params().At(0).
+//
+func (c *CallCommon) Signature() *types.Signature {
+ if c.Method != nil {
+ return c.Method.Type().(*types.Signature)
+ }
+ return c.Value.Type().Underlying().(*types.Signature)
+}
+
+// StaticCallee returns the callee if this is a trivially static
+// "call"-mode call to a function.
+func (c *CallCommon) StaticCallee() *Function {
+ switch fn := c.Value.(type) {
+ case *Function:
+ return fn
+ case *MakeClosure:
+ return fn.Fn.(*Function)
+ }
+ return nil
+}
+
+// Description returns a description of the mode of this call suitable
+// for a user interface, e.g., "static method call".
+func (c *CallCommon) Description() string {
+ switch fn := c.Value.(type) {
+ case *Builtin:
+ return "built-in function call"
+ case *MakeClosure:
+ return "static function closure call"
+ case *Function:
+ if fn.Signature.Recv() != nil {
+ return "static method call"
+ }
+ return "static function call"
+ }
+ if c.IsInvoke() {
+ return "dynamic method call" // ("invoke" mode)
+ }
+ return "dynamic function call"
+}
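+
+// A minimal sketch of dispatching on the two CallCommon modes; the helper
+// name calleeName is illustrative only:
+//
+//	func calleeName(c *CallCommon) string {
+//		if c.IsInvoke() {
+//			return "interface method " + c.Method.Name()
+//		}
+//		if fn := c.StaticCallee(); fn != nil {
+//			return "static callee " + fn.Name()
+//		}
+//		return "dynamic callee"
+//	}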
+
+// The CallInstruction interface, implemented by *Go, *Defer and *Call,
+// exposes the common parts of function-calling instructions,
+// yet provides a way back to the Value defined by *Call alone.
+//
+type CallInstruction interface {
+ Instruction
+ Common() *CallCommon // returns the common parts of the call
+ Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer)
+}
+
+func (s *Call) Common() *CallCommon { return &s.Call }
+func (s *Defer) Common() *CallCommon { return &s.Call }
+func (s *Go) Common() *CallCommon { return &s.Call }
+
+func (s *Call) Value() *Call { return s }
+func (s *Defer) Value() *Call { return nil }
+func (s *Go) Value() *Call { return nil }
+
+func (v *Builtin) Type() types.Type { return v.sig }
+func (v *Builtin) Name() string { return v.name }
+func (*Builtin) Referrers() *[]Instruction { return nil }
+func (v *Builtin) Pos() token.Pos { return token.NoPos }
+func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) }
+func (v *Builtin) Parent() *Function { return nil }
+
+func (v *FreeVar) Type() types.Type { return v.typ }
+func (v *FreeVar) Name() string { return v.name }
+func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers }
+func (v *FreeVar) Pos() token.Pos { return v.pos }
+func (v *FreeVar) Parent() *Function { return v.parent }
+
+func (v *Global) Type() types.Type { return v.typ }
+func (v *Global) Name() string { return v.name }
+func (v *Global) Parent() *Function { return nil }
+func (v *Global) Pos() token.Pos { return v.pos }
+func (v *Global) Referrers() *[]Instruction { return nil }
+func (v *Global) Token() token.Token { return token.VAR }
+func (v *Global) Object() types.Object { return v.object }
+func (v *Global) String() string { return v.RelString(nil) }
+func (v *Global) Package() *Package { return v.Pkg }
+func (v *Global) RelString(from *types.Package) string { return relString(v, from) }
+
+func (v *Function) Name() string { return v.name }
+func (v *Function) Type() types.Type { return v.Signature }
+func (v *Function) Pos() token.Pos { return v.pos }
+func (v *Function) Token() token.Token { return token.FUNC }
+func (v *Function) Object() types.Object { return v.object }
+func (v *Function) String() string { return v.RelString(nil) }
+func (v *Function) Package() *Package { return v.Pkg }
+func (v *Function) Parent() *Function { return v.parent }
+func (v *Function) Referrers() *[]Instruction {
+ if v.parent != nil {
+ return &v.referrers
+ }
+ return nil
+}
+
+func (v *Parameter) Type() types.Type { return v.typ }
+func (v *Parameter) Name() string { return v.name }
+func (v *Parameter) Object() types.Object { return v.object }
+func (v *Parameter) Referrers() *[]Instruction { return &v.referrers }
+func (v *Parameter) Pos() token.Pos { return v.pos }
+func (v *Parameter) Parent() *Function { return v.parent }
+
+func (v *Alloc) Type() types.Type { return v.typ }
+func (v *Alloc) Referrers() *[]Instruction { return &v.referrers }
+func (v *Alloc) Pos() token.Pos { return v.pos }
+
+func (v *register) Type() types.Type { return v.typ }
+func (v *register) setType(typ types.Type) { v.typ = typ }
+func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) }
+func (v *register) setNum(num int) { v.num = num }
+func (v *register) Referrers() *[]Instruction { return &v.referrers }
+func (v *register) Pos() token.Pos { return v.pos }
+func (v *register) setPos(pos token.Pos) { v.pos = pos }
+
+func (v *anInstruction) Parent() *Function { return v.block.parent }
+func (v *anInstruction) Block() *BasicBlock { return v.block }
+func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block }
+func (v *anInstruction) Referrers() *[]Instruction { return nil }
+
+func (t *Type) Name() string { return t.object.Name() }
+func (t *Type) Pos() token.Pos { return t.object.Pos() }
+func (t *Type) Type() types.Type { return t.object.Type() }
+func (t *Type) Token() token.Token { return token.TYPE }
+func (t *Type) Object() types.Object { return t.object }
+func (t *Type) String() string { return t.RelString(nil) }
+func (t *Type) Package() *Package { return t.pkg }
+func (t *Type) RelString(from *types.Package) string { return relString(t, from) }
+
+func (c *NamedConst) Name() string { return c.object.Name() }
+func (c *NamedConst) Pos() token.Pos { return c.object.Pos() }
+func (c *NamedConst) String() string { return c.RelString(nil) }
+func (c *NamedConst) Type() types.Type { return c.object.Type() }
+func (c *NamedConst) Token() token.Token { return token.CONST }
+func (c *NamedConst) Object() types.Object { return c.object }
+func (c *NamedConst) Package() *Package { return c.pkg }
+func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) }
+
+// Func returns the package-level function of the specified name,
+// or nil if not found.
+//
+func (p *Package) Func(name string) (f *Function) {
+ f, _ = p.Members[name].(*Function)
+ return
+}
+
+// Var returns the package-level variable of the specified name,
+// or nil if not found.
+//
+func (p *Package) Var(name string) (g *Global) {
+ g, _ = p.Members[name].(*Global)
+ return
+}
+
+// Const returns the package-level constant of the specified name,
+// or nil if not found.
+//
+func (p *Package) Const(name string) (c *NamedConst) {
+ c, _ = p.Members[name].(*NamedConst)
+ return
+}
+
+// Type returns the package-level type of the specified name,
+// or nil if not found.
+//
+func (p *Package) Type(name string) (t *Type) {
+ t, _ = p.Members[name].(*Type)
+ return
+}
+
+func (v *Call) Pos() token.Pos { return v.Call.pos }
+func (s *Defer) Pos() token.Pos { return s.pos }
+func (s *Go) Pos() token.Pos { return s.pos }
+func (s *MapUpdate) Pos() token.Pos { return s.pos }
+func (s *Panic) Pos() token.Pos { return s.pos }
+func (s *Return) Pos() token.Pos { return s.pos }
+func (s *Send) Pos() token.Pos { return s.pos }
+func (s *Store) Pos() token.Pos { return s.pos }
+func (s *If) Pos() token.Pos { return token.NoPos }
+func (s *Jump) Pos() token.Pos { return token.NoPos }
+func (s *RunDefers) Pos() token.Pos { return token.NoPos }
+func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() }
+
+// Operands.
+
+func (v *Alloc) Operands(rands []*Value) []*Value {
+ return rands
+}
+
+func (v *BinOp) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Y)
+}
+
+func (c *CallCommon) Operands(rands []*Value) []*Value {
+ rands = append(rands, &c.Value)
+ for i := range c.Args {
+ rands = append(rands, &c.Args[i])
+ }
+ return rands
+}
+
+func (s *Go) Operands(rands []*Value) []*Value {
+ return s.Call.Operands(rands)
+}
+
+func (s *Call) Operands(rands []*Value) []*Value {
+ return s.Call.Operands(rands)
+}
+
+func (s *Defer) Operands(rands []*Value) []*Value {
+ return s.Call.Operands(rands)
+}
+
+func (v *ChangeInterface) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *ChangeType) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *Convert) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (s *DebugRef) Operands(rands []*Value) []*Value {
+ return append(rands, &s.X)
+}
+
+func (v *Extract) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Tuple)
+}
+
+func (v *Field) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *FieldAddr) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (s *If) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Cond)
+}
+
+func (v *Index) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Index)
+}
+
+func (v *IndexAddr) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Index)
+}
+
+func (*Jump) Operands(rands []*Value) []*Value {
+ return rands
+}
+
+func (v *Lookup) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Index)
+}
+
+func (v *MakeChan) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Size)
+}
+
+func (v *MakeClosure) Operands(rands []*Value) []*Value {
+ rands = append(rands, &v.Fn)
+ for i := range v.Bindings {
+ rands = append(rands, &v.Bindings[i])
+ }
+ return rands
+}
+
+func (v *MakeInterface) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *MakeMap) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Reserve)
+}
+
+func (v *MakeSlice) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Len, &v.Cap)
+}
+
+func (v *MapUpdate) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Map, &v.Key, &v.Value)
+}
+
+func (v *Next) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Iter)
+}
+
+func (s *Panic) Operands(rands []*Value) []*Value {
+ return append(rands, &s.X)
+}
+
+func (v *Phi) Operands(rands []*Value) []*Value {
+ for i := range v.Edges {
+ rands = append(rands, &v.Edges[i])
+ }
+ return rands
+}
+
+func (v *Range) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (s *Return) Operands(rands []*Value) []*Value {
+ for i := range s.Results {
+ rands = append(rands, &s.Results[i])
+ }
+ return rands
+}
+
+func (*RunDefers) Operands(rands []*Value) []*Value {
+ return rands
+}
+
+func (v *Select) Operands(rands []*Value) []*Value {
+ for i := range v.States {
+ rands = append(rands, &v.States[i].Chan, &v.States[i].Send)
+ }
+ return rands
+}
+
+func (s *Send) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Chan, &s.X)
+}
+
+func (v *Slice) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Low, &v.High, &v.Max)
+}
+
+func (s *Store) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Addr, &s.Val)
+}
+
+func (v *TypeAssert) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *UnOp) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+// Non-Instruction Values:
+func (v *Builtin) Operands(rands []*Value) []*Value { return rands }
+func (v *FreeVar) Operands(rands []*Value) []*Value { return rands }
+func (v *Const) Operands(rands []*Value) []*Value { return rands }
+func (v *Function) Operands(rands []*Value) []*Value { return rands }
+func (v *Global) Operands(rands []*Value) []*Value { return rands }
+func (v *Parameter) Operands(rands []*Value) []*Value { return rands }
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go
new file mode 100644
index 0000000..30b8053
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go
@@ -0,0 +1,95 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssautil
+
+// This file defines utility functions for constructing programs in SSA form.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
+)
+
+// CreateProgram returns a new program in SSA form, given a program
+// loaded from source. An SSA package is created for each transitively
+// error-free package of lprog.
+//
+// Code for bodies of functions is not built until Build is called
+// on the result.
+//
+// mode controls diagnostics and checking during SSA construction.
+//
+func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program {
+ prog := ssa.NewProgram(lprog.Fset, mode)
+
+ for _, info := range lprog.AllPackages {
+ if info.TransitivelyErrorFree {
+ prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
+ }
+ }
+
+ return prog
+}
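+
+// A minimal sketch of typical use, assuming conf is a loader.Config that
+// has already been populated with packages to load:
+//
+//	lprog, err := conf.Load()
+//	if err != nil {
+//		return err // or handle as appropriate
+//	}
+//	prog := CreateProgram(lprog, ssa.SanityCheckFunctions)
+//	prog.Build() // build SSA code for all function bodies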
+
+// BuildPackage builds an SSA program with IR for a single package.
+//
+// It populates pkg by type-checking the specified file ASTs. All
+// dependencies are loaded using the importer specified by tc, which
+// typically loads compiler export data; SSA code cannot be built for
+// those packages. BuildPackage then constructs an ssa.Program with all
+// dependency packages created, and builds and returns the SSA package
+// corresponding to pkg.
+//
+// The caller must have set pkg.Path() to the import path.
+//
+// The operation fails if there were any type-checking or import errors.
+//
+// See ../ssa/example_test.go for an example.
+//
+func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) {
+ if fset == nil {
+ panic("no token.FileSet")
+ }
+ if pkg.Path() == "" {
+ panic("package has no import path")
+ }
+
+ info := &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+ if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
+ return nil, nil, err
+ }
+
+ prog := ssa.NewProgram(fset, mode)
+
+ // Create SSA packages for all imports.
+ // Order is not significant.
+ created := make(map[*types.Package]bool)
+ var createAll func(pkgs []*types.Package)
+ createAll = func(pkgs []*types.Package) {
+ for _, p := range pkgs {
+ if !created[p] {
+ created[p] = true
+ prog.CreatePackage(p, nil, nil, true)
+ createAll(p.Imports())
+ }
+ }
+ }
+ createAll(pkg.Imports())
+
+ // Create and build the primary package.
+ ssapkg := prog.CreatePackage(pkg, files, info, false)
+ ssapkg.Build()
+ return ssapkg, info, nil
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load_test.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load_test.go
new file mode 100644
index 0000000..8ccd463
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/load_test.go
@@ -0,0 +1,64 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssautil_test
+
+import (
+ "go/ast"
+ "go/importer"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "os"
+ "testing"
+
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+const hello = `package main
+
+import "fmt"
+
+func main() {
+ fmt.Println("Hello, world")
+}
+`
+
+func TestBuildPackage(t *testing.T) {
+ // There is a more substantial test of BuildPackage and the
+ // SSA program it builds in ../ssa/builder_test.go.
+
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "hello.go", hello, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ pkg := types.NewPackage("hello", "")
+ ssapkg, _, err := ssautil.BuildPackage(&types.Config{Importer: importer.Default()}, fset, pkg, []*ast.File{f}, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if pkg.Name() != "main" {
+ t.Errorf("pkg.Name() = %s, want main", pkg.Name())
+ }
+ if ssapkg.Func("main") == nil {
+ ssapkg.WriteTo(os.Stderr)
+ t.Errorf("ssapkg has no main function")
+ }
+}
+
+func TestBuildPackage_MissingImport(t *testing.T) {
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "bad.go", `package bad; import "missing"`, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ pkg := types.NewPackage("bad", "")
+ ssapkg, _, err := ssautil.BuildPackage(new(types.Config), fset, pkg, []*ast.File{f}, 0)
+ if err == nil || ssapkg != nil {
+ t.Fatal("BuildPackage succeeded unexpectedly")
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go
new file mode 100644
index 0000000..db03bf5
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go
@@ -0,0 +1,234 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssautil
+
+// This file implements discovery of switch and type-switch constructs
+// from low-level control flow.
+//
+// Many techniques exist for compiling a high-level switch with
+// constant cases to efficient machine code. The optimal choice will
+// depend on the data type, the specific case values, the code in the
+// body of each case, and the hardware.
+// Some examples:
+// - a lookup table (for a switch that maps constants to constants)
+// - a computed goto
+// - a binary tree
+// - a perfect hash
+// - a two-level switch (to partition constant strings by their first byte).
+
+import (
+ "bytes"
+ "fmt"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/ssa"
+)
+
+// A ConstCase represents a single constant comparison.
+// It is part of a Switch.
+type ConstCase struct {
+ Block *ssa.BasicBlock // block performing the comparison
+ Body *ssa.BasicBlock // body of the case
+ Value *ssa.Const // case comparand
+}
+
+// A TypeCase represents a single type assertion.
+// It is part of a Switch.
+type TypeCase struct {
+ Block *ssa.BasicBlock // block performing the type assert
+ Body *ssa.BasicBlock // body of the case
+ Type types.Type // case type
+ Binding ssa.Value // value bound by this case
+}
+
+// A Switch is a logical high-level control flow operation
+// (a multiway branch) discovered by analysis of a CFG containing
+// only if/else chains. It is not part of the ssa.Instruction set.
+//
+// One of ConstCases and TypeCases has length >= 2;
+// the other is nil.
+//
+// In a value switch, the list of cases may contain duplicate constants.
+// A type switch may contain duplicate types, or types assignable
+// to an interface type also in the list.
+// TODO(adonovan): eliminate such duplicates.
+//
+type Switch struct {
+ Start *ssa.BasicBlock // block containing start of if/else chain
+ X ssa.Value // the switch operand
+ ConstCases []ConstCase // ordered list of constant comparisons
+ TypeCases []TypeCase // ordered list of type assertions
+ Default *ssa.BasicBlock // successor if all comparisons fail
+}
+
+func (sw *Switch) String() string {
+ // We represent each block by the String() of its
+ // first Instruction, e.g. "print(42:int)".
+ var buf bytes.Buffer
+ if sw.ConstCases != nil {
+ fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name())
+ for _, c := range sw.ConstCases {
+ fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0])
+ }
+ } else {
+ fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name())
+ for _, c := range sw.TypeCases {
+ fmt.Fprintf(&buf, "case %s %s: %s\n",
+ c.Binding.Name(), c.Type, c.Body.Instrs[0])
+ }
+ }
+ if sw.Default != nil {
+ fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0])
+ }
+ fmt.Fprintf(&buf, "}")
+ return buf.String()
+}
+
+// Switches examines the control-flow graph of fn and returns the
+// set of inferred value and type switches. A value switch tests an
+// ssa.Value for equality against two or more compile-time constant
+// values. Switches involving link-time constants (addresses) are
+// ignored. A type switch type-asserts an ssa.Value against two or
+// more types.
+//
+// The switches are returned in dominance order.
+//
+// The resulting switches do not necessarily correspond to uses of the
+// 'switch' keyword in the source: for example, a single source-level
+// switch statement with non-constant cases may result in zero, one or
+// many Switches, one per plural sequence of constant cases.
+// Switches may even be inferred from if/else- or goto-based control flow.
+// (In general, the control flow constructs of the source program
+// cannot be faithfully reproduced from the SSA representation.)
+//
+func Switches(fn *ssa.Function) []Switch {
+ // Traverse the CFG in dominance order, so we don't
+ // enter an if/else-chain in the middle.
+ var switches []Switch
+ seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet
+ for _, b := range fn.DomPreorder() {
+ if x, k := isComparisonBlock(b); x != nil {
+ // Block b starts a switch.
+ sw := Switch{Start: b, X: x}
+ valueSwitch(&sw, k, seen)
+ if len(sw.ConstCases) > 1 {
+ switches = append(switches, sw)
+ }
+ }
+
+ if y, x, T := isTypeAssertBlock(b); y != nil {
+ // Block b starts a type switch.
+ sw := Switch{Start: b, X: x}
+ typeSwitch(&sw, y, T, seen)
+ if len(sw.TypeCases) > 1 {
+ switches = append(switches, sw)
+ }
+ }
+ }
+ return switches
+}
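+
+// A minimal sketch of typical use, assuming fn is a built *ssa.Function:
+//
+//	for _, sw := range Switches(fn) {
+//		fmt.Println(sw.String())
+//	}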
+
+func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) {
+ b := sw.Start
+ x := sw.X
+ for x == sw.X {
+ if seen[b] {
+ break
+ }
+ seen[b] = true
+
+ sw.ConstCases = append(sw.ConstCases, ConstCase{
+ Block: b,
+ Body: b.Succs[0],
+ Value: k,
+ })
+ b = b.Succs[1]
+ if len(b.Instrs) > 2 {
+ // Block b contains not just 'if x == k',
+ // so it may have side effects that
+ // make it unsafe to elide.
+ break
+ }
+ if len(b.Preds) != 1 {
+ // Block b has multiple predecessors,
+ // so it cannot be treated as a case.
+ break
+ }
+ x, k = isComparisonBlock(b)
+ }
+ sw.Default = b
+}
+
+func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) {
+ b := sw.Start
+ x := sw.X
+ for x == sw.X {
+ if seen[b] {
+ break
+ }
+ seen[b] = true
+
+ sw.TypeCases = append(sw.TypeCases, TypeCase{
+ Block: b,
+ Body: b.Succs[0],
+ Type: T,
+ Binding: y,
+ })
+ b = b.Succs[1]
+ if len(b.Instrs) > 4 {
+ // Block b contains not just
+ // {TypeAssert; Extract #0; Extract #1; If}
+ // so it may have side effects that
+ // make it unsafe to elide.
+ break
+ }
+ if len(b.Preds) != 1 {
+ // Block b has multiple predecessors,
+ // so it cannot be treated as a case.
+ break
+ }
+ y, x, T = isTypeAssertBlock(b)
+ }
+ sw.Default = b
+}
+
+// isComparisonBlock returns the operands (v, k) if a block ends with
+// a comparison v==k, where k is a compile-time constant.
+//
+func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) {
+ if n := len(b.Instrs); n >= 2 {
+ if i, ok := b.Instrs[n-1].(*ssa.If); ok {
+ if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL {
+ if k, ok := binop.Y.(*ssa.Const); ok {
+ return binop.X, k
+ }
+ if k, ok := binop.X.(*ssa.Const); ok {
+ return binop.Y, k
+ }
+ }
+ }
+ }
+ return
+}
+
+// isTypeAssertBlock returns the operands (y, x, T) if a block ends with
+// a type assertion "if y, ok := x.(T); ok {".
+//
+func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) {
+ if n := len(b.Instrs); n >= 4 {
+ if i, ok := b.Instrs[n-1].(*ssa.If); ok {
+ if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 {
+ if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b {
+ // hack: relies upon instruction ordering.
+ if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok {
+ return ext0, ta.X, ta.AssertedType
+ }
+ }
+ }
+ }
+ }
+ return
+}
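
A rough usage sketch (editorial, not part of the vendored file): it parses a small package from an inline string, builds SSA, and prints every Switch that Switches infers from the if/else chain in classify. The package name "demo" and the source text are invented for illustration; the test file that follows exercises the same API against testdata/switches.go.

package main

import (
	"fmt"

	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

const src = `package demo

func classify(x int) string {
	if x == 1 {
		return "one"
	} else if x == 2 {
		return "two"
	}
	return "many"
}
`

func main() {
	var conf loader.Config
	f, err := conf.ParseFile("demo.go", src)
	if err != nil {
		panic(err)
	}
	conf.CreateFromFiles("demo", f)

	iprog, err := conf.Load()
	if err != nil {
		panic(err)
	}

	prog := ssautil.CreateProgram(iprog, ssa.SanityCheckFunctions)
	prog.Build()

	// classify contains no 'switch' statement, but its two constant
	// comparisons against x are reported as a single value Switch.
	for fn := range ssautil.AllFunctions(prog) {
		for _, sw := range ssautil.Switches(fn) {
			fmt.Printf("%s:\n%s\n", fn, sw.String())
		}
	}
}
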
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/switch_test.go b/vendor/golang.org/x/tools/go/ssa/ssautil/switch_test.go
new file mode 100644
index 0000000..a47dbef
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/switch_test.go
@@ -0,0 +1,74 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// No testdata on Android.
+
+// +build !android
+
+package ssautil_test
+
+import (
+ "go/parser"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+func TestSwitches(t *testing.T) {
+ conf := loader.Config{ParserMode: parser.ParseComments}
+ f, err := conf.ParseFile("testdata/switches.go", nil)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ conf.CreateFromFiles("main", f)
+ iprog, err := conf.Load()
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ prog := ssautil.CreateProgram(iprog, 0)
+ mainPkg := prog.Package(iprog.Created[0].Pkg)
+ mainPkg.Build()
+
+ for _, mem := range mainPkg.Members {
+ if fn, ok := mem.(*ssa.Function); ok {
+ if fn.Synthetic != "" {
+ continue // e.g. init()
+ }
+ // Each (multi-line) "switch" comment within
+ // this function must match the printed form
+			// of a Switch.
+ var wantSwitches []string
+ for _, c := range f.Comments {
+ if fn.Syntax().Pos() <= c.Pos() && c.Pos() < fn.Syntax().End() {
+ text := strings.TrimSpace(c.Text())
+ if strings.HasPrefix(text, "switch ") {
+ wantSwitches = append(wantSwitches, text)
+ }
+ }
+ }
+
+ switches := ssautil.Switches(fn)
+ if len(switches) != len(wantSwitches) {
+ t.Errorf("in %s, found %d switches, want %d", fn, len(switches), len(wantSwitches))
+ }
+ for i, sw := range switches {
+ got := sw.String()
+ if i >= len(wantSwitches) {
+ continue
+ }
+ want := wantSwitches[i]
+ if got != want {
+ t.Errorf("in %s, found switch %d: got <<%s>>, want <<%s>>", fn, i, got, want)
+ }
+ }
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/testdata/switches.go b/vendor/golang.org/x/tools/go/ssa/ssautil/testdata/switches.go
new file mode 100644
index 0000000..8ab4c11
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/testdata/switches.go
@@ -0,0 +1,357 @@
+// +build ignore
+
+package main
+
+// This file is the input to TestSwitches in switch_test.go.
+// Each multiway conditional with constant or type cases (Switch)
+// discovered by Switches is printed, and compared with the
+// comments.
+//
+// The body of each case is printed as the value of its first
+// instruction.
+
+// -------- Value switches --------
+
+func SimpleSwitch(x, y int) {
+ // switch x {
+ // case 1:int: print(1:int)
+ // case 2:int: print(23:int)
+ // case 3:int: print(23:int)
+ // case 4:int: print(3:int)
+ // default: x == y
+ // }
+ switch x {
+ case 1:
+ print(1)
+ case 2, 3:
+ print(23)
+ fallthrough
+ case 4:
+ print(3)
+ default:
+ print(4)
+ case y:
+ print(5)
+ }
+ print(6)
+}
+
+func four() int { return 4 }
+
+// A non-constant case makes a switch "impure", but its pure
+// cases form two separate switches.
+func SwitchWithNonConstantCase(x int) {
+ // switch x {
+ // case 1:int: print(1:int)
+ // case 2:int: print(23:int)
+ // case 3:int: print(23:int)
+ // default: four()
+ // }
+
+ // switch x {
+ // case 5:int: print(5:int)
+ // case 6:int: print(6:int)
+ // default: print("done":string)
+ // }
+ switch x {
+ case 1:
+ print(1)
+ case 2, 3:
+ print(23)
+ case four():
+ print(3)
+ case 5:
+ print(5)
+ case 6:
+ print(6)
+ }
+ print("done")
+}
+
+// Switches may be found even where the source
+// program doesn't have a switch statement.
+
+func ImplicitSwitches(x, y int) {
+ // switch x {
+ // case 1:int: print(12:int)
+ // case 2:int: print(12:int)
+ // default: x < 5:int
+ // }
+ if x == 1 || 2 == x || x < 5 {
+ print(12)
+ }
+
+ // switch x {
+ // case 3:int: print(34:int)
+ // case 4:int: print(34:int)
+ // default: x == y
+ // }
+ if x == 3 || 4 == x || x == y {
+ print(34)
+ }
+
+ // Not a switch: no consistent variable.
+ if x == 5 || y == 6 {
+ print(56)
+ }
+
+ // Not a switch: only one constant comparison.
+ if x == 7 || x == y {
+ print(78)
+ }
+}
+
+func IfElseBasedSwitch(x int) {
+ // switch x {
+ // case 1:int: print(1:int)
+ // case 2:int: print(2:int)
+ // default: print("else":string)
+ // }
+ if x == 1 {
+ print(1)
+ } else if x == 2 {
+ print(2)
+ } else {
+ print("else")
+ }
+}
+
+func GotoBasedSwitch(x int) {
+ // switch x {
+ // case 1:int: print(1:int)
+ // case 2:int: print(2:int)
+ // default: print("else":string)
+ // }
+ if x == 1 {
+ goto L1
+ }
+ if x == 2 {
+ goto L2
+ }
+ print("else")
+L1:
+ print(1)
+ goto end
+L2:
+ print(2)
+end:
+}
+
+func SwitchInAForLoop(x int) {
+ // switch x {
+ // case 1:int: print(1:int)
+ // case 2:int: print(2:int)
+ // default: print("head":string)
+ // }
+loop:
+ for {
+ print("head")
+ switch x {
+ case 1:
+ print(1)
+ break loop
+ case 2:
+ print(2)
+ break loop
+ }
+ }
+}
+
+// This case is a switch in a for-loop, both constructed using goto.
+// As before, the default case points back to the block containing the
+// switch, but that's ok.
+func SwitchInAForLoopUsingGoto(x int) {
+ // switch x {
+ // case 1:int: print(1:int)
+ // case 2:int: print(2:int)
+ // default: print("head":string)
+ // }
+loop:
+ print("head")
+ if x == 1 {
+ goto L1
+ }
+ if x == 2 {
+ goto L2
+ }
+ goto loop
+L1:
+ print(1)
+ goto end
+L2:
+ print(2)
+end:
+}
+
+func UnstructuredSwitchInAForLoop(x int) {
+ // switch x {
+ // case 1:int: print(1:int)
+ // case 2:int: x == 1:int
+ // default: print("end":string)
+ // }
+ for {
+ if x == 1 {
+ print(1)
+ return
+ }
+ if x == 2 {
+ continue
+ }
+ break
+ }
+ print("end")
+}
+
+func CaseWithMultiplePreds(x int) {
+ for {
+ if x == 1 {
+ print(1)
+ return
+ }
+ loop:
+ // This block has multiple predecessors,
+ // so can't be treated as a switch case.
+ if x == 2 {
+ goto loop
+ }
+ break
+ }
+ print("end")
+}
+
+func DuplicateConstantsAreNotEliminated(x int) {
+ // switch x {
+ // case 1:int: print(1:int)
+ // case 1:int: print("1a":string)
+ // case 2:int: print(2:int)
+ // default: return
+ // }
+ if x == 1 {
+ print(1)
+ } else if x == 1 { // duplicate => unreachable
+ print("1a")
+ } else if x == 2 {
+ print(2)
+ }
+}
+
+// Interface values (created by comparisons) are not constants,
+// so Switch.X is never of interface type.
+func MakeInterfaceIsNotAConstant(x interface{}) {
+ if x == "foo" {
+ print("foo")
+ } else if x == 1 {
+ print(1)
+ }
+}
+
+func ZeroInitializedVarsAreConstants(x int) {
+ // switch x {
+ // case 0:int: print(1:int)
+ // case 2:int: print(2:int)
+ // default: print("end":string)
+ // }
+ var zero int // SSA construction replaces zero with 0
+ if x == zero {
+ print(1)
+ } else if x == 2 {
+ print(2)
+ }
+ print("end")
+}
+
+// -------- Select --------
+
+// NB, potentially fragile reliance on register number.
+func SelectDesugarsToSwitch(ch chan int) {
+ // switch t1 {
+ // case 0:int: extract t0 #2
+ // case 1:int: println(0:int)
+ // case 2:int: println(1:int)
+ // default: println("default":string)
+ // }
+ select {
+ case x := <-ch:
+ println(x)
+ case <-ch:
+ println(0)
+ case ch <- 1:
+ println(1)
+ default:
+ println("default")
+ }
+}
+
+// NB, potentially fragile reliance on register number.
+func NonblockingSelectDefaultCasePanics(ch chan int) {
+ // switch t1 {
+ // case 0:int: extract t0 #2
+ // case 1:int: println(0:int)
+ // case 2:int: println(1:int)
+ // default: make interface{} <- string ("blocking select m...":string)
+ // }
+ select {
+ case x := <-ch:
+ println(x)
+ case <-ch:
+ println(0)
+ case ch <- 1:
+ println(1)
+ }
+}
+
+// -------- Type switches --------
+
+// NB, reliance on fragile register numbering.
+func SimpleTypeSwitch(x interface{}) {
+ // switch x.(type) {
+ // case t3 int: println(x)
+ // case t7 bool: println(x)
+ // case t10 string: println(t10)
+ // default: println(x)
+ // }
+ switch y := x.(type) {
+ case nil:
+ println(y)
+ case int, bool:
+ println(y)
+ case string:
+ println(y)
+ default:
+ println(y)
+ }
+}
+
+// NB, potentially fragile reliance on register number.
+func DuplicateTypesAreNotEliminated(x interface{}) {
+ // switch x.(type) {
+ // case t1 string: println(1:int)
+ // case t5 interface{}: println(t5)
+ // case t9 int: println(3:int)
+ // default: return
+ // }
+ switch y := x.(type) {
+ case string:
+ println(1)
+ case interface{}:
+ println(y)
+ case int:
+ println(3) // unreachable!
+ }
+}
+
+// NB, potentially fragile reliance on register number.
+func AdHocTypeSwitch(x interface{}) {
+ // switch x.(type) {
+ // case t1 int: println(t1)
+ // case t5 string: println(t5)
+ // default: print("default":string)
+ // }
+ if i, ok := x.(int); ok {
+ println(i)
+ } else if s, ok := x.(string); ok {
+ println(s)
+ } else {
+ print("default")
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go
new file mode 100644
index 0000000..3424e8a
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go
@@ -0,0 +1,79 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssautil // import "golang.org/x/tools/go/ssa/ssautil"
+
+import "golang.org/x/tools/go/ssa"
+
+// This file defines utilities for visiting the SSA representation of
+// a Program.
+//
+// TODO(adonovan): test coverage.
+
+// AllFunctions finds and returns the set of functions potentially
+// needed by program prog, as determined by a simple linker-style
+// reachability algorithm starting from the members and method-sets of
+// each package. The result may include anonymous functions and
+// synthetic wrappers.
+//
+// Precondition: all packages are built.
+//
+func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool {
+ visit := visitor{
+ prog: prog,
+ seen: make(map[*ssa.Function]bool),
+ }
+ visit.program()
+ return visit.seen
+}
+
+type visitor struct {
+ prog *ssa.Program
+ seen map[*ssa.Function]bool
+}
+
+func (visit *visitor) program() {
+ for _, pkg := range visit.prog.AllPackages() {
+ for _, mem := range pkg.Members {
+ if fn, ok := mem.(*ssa.Function); ok {
+ visit.function(fn)
+ }
+ }
+ }
+ for _, T := range visit.prog.RuntimeTypes() {
+ mset := visit.prog.MethodSets.MethodSet(T)
+ for i, n := 0, mset.Len(); i < n; i++ {
+ visit.function(visit.prog.MethodValue(mset.At(i)))
+ }
+ }
+}
+
+func (visit *visitor) function(fn *ssa.Function) {
+ if !visit.seen[fn] {
+ visit.seen[fn] = true
+ var buf [10]*ssa.Value // avoid alloc in common case
+ for _, b := range fn.Blocks {
+ for _, instr := range b.Instrs {
+ for _, op := range instr.Operands(buf[:0]) {
+ if fn, ok := (*op).(*ssa.Function); ok {
+ visit.function(fn)
+ }
+ }
+ }
+ }
+ }
+}
+
+// MainPackages returns the subset of the specified packages
+// named "main" that define a main function.
+// The result may include synthetic "testmain" packages.
+func MainPackages(pkgs []*ssa.Package) []*ssa.Package {
+ var mains []*ssa.Package
+ for _, pkg := range pkgs {
+ if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil {
+ mains = append(mains, pkg)
+ }
+ }
+ return mains
+}
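
An illustrative sketch (editorial, not vendored code) of the two helpers above: AllFunctions after building the program, and MainPackages to pick out runnable packages. The inline "main" package is an assumption for illustration.

package main

import (
	"fmt"

	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa/ssautil"
)

const src = `package main

func main() { println("hello") }
`

func main() {
	var conf loader.Config
	f, err := conf.ParseFile("main.go", src)
	if err != nil {
		panic(err)
	}
	conf.CreateFromFiles("main", f)

	iprog, err := conf.Load()
	if err != nil {
		panic(err)
	}

	prog := ssautil.CreateProgram(iprog, 0)
	prog.Build() // AllFunctions' precondition: all packages are built

	fmt.Println("reachable functions:", len(ssautil.AllFunctions(prog)))
	for _, p := range ssautil.MainPackages(prog.AllPackages()) {
		fmt.Println("main package:", p.Pkg.Path())
	}
}
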
diff --git a/vendor/golang.org/x/tools/go/ssa/stdlib_test.go b/vendor/golang.org/x/tools/go/ssa/stdlib_test.go
new file mode 100644
index 0000000..f7e0f86
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/stdlib_test.go
@@ -0,0 +1,151 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Incomplete source tree on Android.
+
+// +build !android
+
+package ssa_test
+
+// This file runs the SSA builder in sanity-checking mode on all
+// packages beneath $GOROOT and prints some summary information.
+//
+// Run with "go test -cpu=8" to set GOMAXPROCS.
+
+import (
+ "go/ast"
+ "go/build"
+ "go/token"
+ "runtime"
+ "testing"
+ "time"
+
+ "golang.org/x/tools/go/buildutil"
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+// Skip the set of packages that transitively depend on
+// cmd/internal/objfile, which uses vendoring,
+// which go/loader does not yet support.
+// TODO(adonovan): add support for vendoring and delete this.
+var skip = map[string]bool{
+ "cmd/addr2line": true,
+ "cmd/internal/objfile": true,
+ "cmd/nm": true,
+ "cmd/objdump": true,
+ "cmd/pprof": true,
+}
+
+func bytesAllocated() uint64 {
+ runtime.GC()
+ var stats runtime.MemStats
+ runtime.ReadMemStats(&stats)
+ return stats.Alloc
+}
+
+func TestStdlib(t *testing.T) {
+ if testing.Short() {
+ t.Skip("skipping in short mode; too slow (golang.org/issue/14113)")
+ }
+	// Record the start time and allocation baseline.
+ t0 := time.Now()
+ alloc0 := bytesAllocated()
+
+ // Load, parse and type-check the program.
+ ctxt := build.Default // copy
+ ctxt.GOPATH = "" // disable GOPATH
+ conf := loader.Config{Build: &ctxt}
+ for _, path := range buildutil.AllPackages(conf.Build) {
+ if skip[path] {
+ continue
+ }
+ conf.ImportWithTests(path)
+ }
+
+ iprog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load failed: %v", err)
+ }
+
+ t1 := time.Now()
+ alloc1 := bytesAllocated()
+
+ // Create SSA packages.
+ var mode ssa.BuilderMode
+ // Comment out these lines during benchmarking. Approx SSA build costs are noted.
+ mode |= ssa.SanityCheckFunctions // + 2% space, + 4% time
+ mode |= ssa.GlobalDebug // +30% space, +18% time
+ prog := ssautil.CreateProgram(iprog, mode)
+
+ t2 := time.Now()
+
+ // Build SSA.
+ prog.Build()
+
+ t3 := time.Now()
+ alloc3 := bytesAllocated()
+
+ numPkgs := len(prog.AllPackages())
+ if want := 140; numPkgs < want {
+ t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
+ }
+
+ // Keep iprog reachable until after we've measured memory usage.
+ if len(iprog.AllPackages) == 0 {
+ panic("unreachable")
+ }
+
+ allFuncs := ssautil.AllFunctions(prog)
+
+ // Check that all non-synthetic functions have distinct names.
+ // Synthetic wrappers for exported methods should be distinct too,
+ // except for unexported ones (explained at (*Function).RelString).
+ byName := make(map[string]*ssa.Function)
+ for fn := range allFuncs {
+ if fn.Synthetic == "" || ast.IsExported(fn.Name()) {
+ str := fn.String()
+ prev := byName[str]
+ byName[str] = fn
+ if prev != nil {
+ t.Errorf("%s: duplicate function named %s",
+ prog.Fset.Position(fn.Pos()), str)
+ t.Errorf("%s: (previously defined here)",
+ prog.Fset.Position(prev.Pos()))
+ }
+ }
+ }
+
+ // Dump some statistics.
+ var numInstrs int
+ for fn := range allFuncs {
+ for _, b := range fn.Blocks {
+ numInstrs += len(b.Instrs)
+ }
+ }
+
+ // determine line count
+ var lineCount int
+ prog.Fset.Iterate(func(f *token.File) bool {
+ lineCount += f.LineCount()
+ return true
+ })
+
+ // NB: when benchmarking, don't forget to clear the debug +
+ // sanity builder flags for better performance.
+
+ t.Log("GOMAXPROCS: ", runtime.GOMAXPROCS(0))
+ t.Log("#Source lines: ", lineCount)
+ t.Log("Load/parse/typecheck: ", t1.Sub(t0))
+ t.Log("SSA create: ", t2.Sub(t1))
+ t.Log("SSA build: ", t3.Sub(t2))
+
+ // SSA stats:
+ t.Log("#Packages: ", numPkgs)
+ t.Log("#Functions: ", len(allFuncs))
+ t.Log("#Instructions: ", numInstrs)
+ t.Log("#MB AST+types: ", int64(alloc1-alloc0)/1e6)
+ t.Log("#MB SSA: ", int64(alloc3-alloc1)/1e6)
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/testdata/objlookup.go b/vendor/golang.org/x/tools/go/ssa/testdata/objlookup.go
new file mode 100644
index 0000000..1aaa417
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/testdata/objlookup.go
@@ -0,0 +1,160 @@
+//+build ignore
+
+package main
+
+// This file is the input to TestObjValueLookup in source_test.go,
+// which ensures that each occurrence of an ident defining or
+// referring to a func, var or const object can be mapped to its
+// corresponding SSA Value.
+//
+// For every reference to a var object, we use annotations in comments
+// to denote both the expected SSA Value kind, and whether to expect
+// its value (x) or its address (&x).
+//
+// For const and func objects, the results don't vary by reference and
+// are always values not addresses, so no annotations are needed. The
+// declaration is enough.
+
+import "fmt"
+import "os"
+
+type J int
+
+func (*J) method() {}
+
+const globalConst = 0
+
+var globalVar int // &globalVar::Global
+
+func globalFunc() {}
+
+type I interface {
+ interfaceMethod()
+}
+
+type S struct {
+ x int // x::nil
+}
+
+func main() {
+ print(globalVar) // globalVar::UnOp
+ globalVar = 1 // globalVar::Const
+
+ var v0 int = 1 // v0::Const (simple local value spec)
+ if v0 > 0 { // v0::Const
+ v0 = 2 // v0::Const
+ }
+ print(v0) // v0::Phi
+
+ // v1 is captured and thus implicitly address-taken.
+ var v1 int = 1 // v1::Const
+ v1 = 2 // v1::Const
+ fmt.Println(v1) // v1::UnOp (load)
+ f := func(param int) { // f::MakeClosure param::Parameter
+ if y := 1; y > 0 { // y::Const
+ print(v1, param) // v1::UnOp (load) param::Parameter
+ }
+ param = 2 // param::Const
+ println(param) // param::Const
+ }
+
+ f(0) // f::MakeClosure
+
+ var v2 int // v2::Const (implicitly zero-initialized local value spec)
+ print(v2) // v2::Const
+
+ m := make(map[string]int) // m::MakeMap
+
+ // Local value spec with multi-valued RHS:
+ var v3, v4 = m[""] // v3::Extract v4::Extract m::MakeMap
+ print(v3) // v3::Extract
+ print(v4) // v4::Extract
+
+ v3++ // v3::BinOp (assign with op)
+ v3 += 2 // v3::BinOp (assign with op)
+
+ v5, v6 := false, "" // v5::Const v6::Const (defining assignment)
+ print(v5) // v5::Const
+ print(v6) // v6::Const
+
+ var v7 S // &v7::Alloc
+ v7.x = 1 // &v7::Alloc &x::FieldAddr
+ print(v7.x) // &v7::Alloc &x::FieldAddr
+
+ var v8 [1]int // &v8::Alloc
+ v8[0] = 0 // &v8::Alloc
+ print(v8[:]) // &v8::Alloc
+ _ = v8[0] // &v8::Alloc
+ _ = v8[:][0] // &v8::Alloc
+ v8ptr := &v8 // v8ptr::Alloc &v8::Alloc
+ _ = v8ptr[0] // v8ptr::Alloc
+ _ = *v8ptr // v8ptr::Alloc
+
+ v8a := make([]int, 1) // v8a::Slice
+ v8a[0] = 0 // v8a::Slice
+ print(v8a[:]) // v8a::Slice
+
+ v9 := S{} // &v9::Alloc
+
+ v10 := &v9 // v10::Alloc &v9::Alloc
+ _ = v10 // v10::Alloc
+
+ var v11 *J = nil // v11::Const
+ v11.method() // v11::Const
+
+ var v12 J // &v12::Alloc
+ v12.method() // &v12::Alloc (implicitly address-taken)
+
+ // NB, in the following, 'method' resolves to the *types.Func
+ // of (*J).method, so it doesn't help us locate the specific
+ // ssa.Values here: a bound-method closure and a promotion
+ // wrapper.
+ _ = v11.method // v11::Const
+ _ = (*struct{ J }).method // J::nil
+
+ // These vars are not optimised away.
+ if false {
+ v13 := 0 // v13::Const
+ println(v13) // v13::Const
+ }
+
+ switch x := 1; x { // x::Const
+ case v0: // v0::Phi
+ }
+
+ for k, v := range m { // k::Extract v::Extract m::MakeMap
+ _ = k // k::Extract
+ v++ // v::BinOp
+ }
+
+ if y := 0; y > 1 { // y::Const y::Const
+ }
+
+ var i interface{} // i::Const (nil interface)
+ i = 1 // i::MakeInterface
+ switch i := i.(type) { // i::MakeInterface i::MakeInterface
+ case int:
+ println(i) // i::Extract
+ }
+
+ ch := make(chan int) // ch::MakeChan
+ select {
+ case x := <-ch: // x::UnOp (receive) ch::MakeChan
+ _ = x // x::UnOp
+ }
+
+ // .Op is an inter-package FieldVal-selection.
+ var err os.PathError // &err::Alloc
+ _ = err.Op // &err::Alloc &Op::FieldAddr
+ _ = &err.Op // &err::Alloc &Op::FieldAddr
+
+ // Exercise corner-cases of lvalues vs rvalues.
+ // (Guessing IsAddr from the 'pointerness' won't cut it here.)
+ type N *N
+ var n N // n::Const
+ n1 := n // n1::Const n::Const
+ n2 := &n1 // n2::Alloc &n1::Alloc
+ n3 := *n2 // n3::UnOp n2::Alloc
+ n4 := **n3 // n4::UnOp n3::UnOp
+ _ = n4 // n4::UnOp
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/testdata/structconv.go b/vendor/golang.org/x/tools/go/ssa/testdata/structconv.go
new file mode 100644
index 0000000..3126469
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/testdata/structconv.go
@@ -0,0 +1,24 @@
+//+build ignore
+
+// This file is the input to TestValueForExprStructConv in identical_test.go,
+// which uses the same framework as TestValueForExpr does in source_test.go.
+//
+// In Go 1.8, struct conversions are permitted even when the struct types have
+// different tags. This wasn't permitted in earlier versions of Go, so this file
+// exists separately from valueforexpr.go to just test this behavior in Go 1.8
+// and later.
+
+package main
+
+type t1 struct {
+ x int
+}
+type t2 struct {
+ x int `tag`
+}
+
+func main() {
+ var tv1 t1
+ var tv2 t2 = /*@ChangeType*/ (t2(tv1))
+ _ = tv2
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/testdata/valueforexpr.go b/vendor/golang.org/x/tools/go/ssa/testdata/valueforexpr.go
new file mode 100644
index 0000000..4a2cb85
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/testdata/valueforexpr.go
@@ -0,0 +1,152 @@
+//+build ignore
+
+package main
+
+// This file is the input to TestValueForExpr in source_test.go, which
+// ensures that each expression e immediately following a /*@kind*/(x)
+// annotation, when passed to Function.ValueForExpr(e), returns a
+// non-nil Value of the same type as e and of kind 'kind'.
+
+func f(spilled, unspilled int) {
+ _ = /*@UnOp*/ (spilled)
+ _ = /*@Parameter*/ (unspilled)
+ _ = /*@<nil>*/ (1 + 2) // (constant)
+ i := 0
+
+ f := func() (int, int) { return 0, 0 }
+
+ /*@Call*/ (print( /*@BinOp*/ (i + 1)))
+ _, _ = /*@Call*/ (f())
+ ch := /*@MakeChan*/ (make(chan int))
+ /*@UnOp*/ (<-ch)
+ x := /*@UnOp*/ (<-ch)
+ _ = x
+ select {
+ case /*@Extract*/ (<-ch):
+ case x := /*@Extract*/ (<-ch):
+ _ = x
+ }
+ defer /*@Function*/ (func() {
+ })()
+ go /*@Function*/ (func() {
+ })()
+ y := 0
+ if true && /*@BinOp*/ (bool(y > 0)) {
+ y = 1
+ }
+ _ = /*@Phi*/ (y)
+ map1 := /*@MakeMap*/ (make(map[string]string))
+ _ = map1
+ _ = /*@Slice*/ (make([]int, 0))
+ _ = /*@MakeClosure*/ (func() { print(spilled) })
+
+ sl := []int{}
+ _ = /*@Slice*/ (sl[:0])
+
+ _ = /*@<nil>*/ (new(int)) // optimized away
+ tmp := /*@Alloc*/ (new(int))
+ _ = tmp
+ var iface interface{}
+ _ = /*@TypeAssert*/ (iface.(int))
+ _ = /*@UnOp*/ (sl[0])
+ _ = /*@IndexAddr*/ (&sl[0])
+ _ = /*@Index*/ ([2]int{}[0])
+ var p *int
+ _ = /*@UnOp*/ (*p)
+
+ _ = /*@UnOp*/ (global)
+ /*@UnOp*/ (global)[""] = ""
+ /*@Global*/ (global) = map[string]string{}
+
+ var local t
+ /*UnOp*/ (local.x) = 1
+
+ // Exercise corner-cases of lvalues vs rvalues.
+ type N *N
+ var n N
+ /*@UnOp*/ (n) = /*@UnOp*/ (n)
+ /*@ChangeType*/ (n) = /*@Alloc*/ (&n)
+ /*@UnOp*/ (n) = /*@UnOp*/ (*n)
+ /*@UnOp*/ (n) = /*@UnOp*/ (**n)
+}
+
+func complit() {
+ // Composite literals.
+ // We get different results for
+ // - composite literal as value (e.g. operand to print)
+ // - composite literal initializer for addressable value
+ // - composite literal value assigned to blank var
+
+ // 1. Slices
+ print( /*@Slice*/ ([]int{}))
+ print( /*@Alloc*/ (&[]int{}))
+ print(& /*@Slice*/ ([]int{}))
+
+ sl1 := /*@Slice*/ ([]int{})
+ sl2 := /*@Alloc*/ (&[]int{})
+ sl3 := & /*@Slice*/ ([]int{})
+ _, _, _ = sl1, sl2, sl3
+
+ _ = /*@Slice*/ ([]int{})
+ _ = /*@<nil>*/ (& /*@Slice*/ ([]int{})) // & optimized away
+ _ = & /*@Slice*/ ([]int{})
+
+ // 2. Arrays
+ print( /*@UnOp*/ ([1]int{}))
+ print( /*@Alloc*/ (&[1]int{}))
+ print(& /*@Alloc*/ ([1]int{}))
+
+ arr1 := /*@Alloc*/ ([1]int{})
+ arr2 := /*@Alloc*/ (&[1]int{})
+ arr3 := & /*@Alloc*/ ([1]int{})
+ _, _, _ = arr1, arr2, arr3
+
+ _ = /*@UnOp*/ ([1]int{})
+ _ = /*@Alloc*/ (& /*@Alloc*/ ([1]int{}))
+ _ = & /*@Alloc*/ ([1]int{})
+
+ // 3. Maps
+ type M map[int]int
+ print( /*@MakeMap*/ (M{}))
+ print( /*@Alloc*/ (&M{}))
+ print(& /*@MakeMap*/ (M{}))
+
+ m1 := /*@MakeMap*/ (M{})
+ m2 := /*@Alloc*/ (&M{})
+ m3 := & /*@MakeMap*/ (M{})
+ _, _, _ = m1, m2, m3
+
+ _ = /*@MakeMap*/ (M{})
+ _ = /*@<nil>*/ (& /*@MakeMap*/ (M{})) // & optimized away
+ _ = & /*@MakeMap*/ (M{})
+
+ // 4. Structs
+ print( /*@UnOp*/ (struct{}{}))
+ print( /*@Alloc*/ (&struct{}{}))
+ print(& /*@Alloc*/ (struct{}{}))
+
+ s1 := /*@Alloc*/ (struct{}{})
+ s2 := /*@Alloc*/ (&struct{}{})
+ s3 := & /*@Alloc*/ (struct{}{})
+ _, _, _ = s1, s2, s3
+
+ _ = /*@UnOp*/ (struct{}{})
+ _ = /*@Alloc*/ (& /*@Alloc*/ (struct{}{}))
+ _ = & /*@Alloc*/ (struct{}{})
+}
+
+type t struct{ x int }
+
+// Ensure we can locate methods of named types.
+func (t) f(param int) {
+ _ = /*@Parameter*/ (param)
+}
+
+// Ensure we can locate init functions.
+func init() {
+ m := /*@MakeMap*/ (make(map[string]string))
+ _ = m
+}
+
+// Ensure we can locate variables in initializer expressions.
+var global = /*@MakeMap*/ (make(map[string]string))
diff --git a/vendor/golang.org/x/tools/go/ssa/testmain.go b/vendor/golang.org/x/tools/go/ssa/testmain.go
new file mode 100644
index 0000000..ea232ad
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/testmain.go
@@ -0,0 +1,267 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// CreateTestMainPackage synthesizes a main package that runs all the
+// tests of the supplied packages.
+// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing.
+//
+// TODO(adonovan): this file no longer needs to live in the ssa package.
+// Move it to ssautil.
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/types"
+ "log"
+ "os"
+ "strings"
+ "text/template"
+)
+
+// FindTests returns the Test, Benchmark, and Example functions
+// (as defined by "go test") defined in the specified package,
+// and its TestMain function, if any.
+func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) {
+ prog := pkg.Prog
+
+ // The first two of these may be nil: if the program doesn't import "testing",
+ // it can't contain any tests, but it may yet contain Examples.
+ var testSig *types.Signature // func(*testing.T)
+ var benchmarkSig *types.Signature // func(*testing.B)
+ var exampleSig = types.NewSignature(nil, nil, nil, false) // func()
+
+ // Obtain the types from the parameters of testing.MainStart.
+ if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
+ mainStart := testingPkg.Func("MainStart")
+ params := mainStart.Signature.Params()
+ testSig = funcField(params.At(1).Type())
+ benchmarkSig = funcField(params.At(2).Type())
+
+ // Does the package define this function?
+ // func TestMain(*testing.M)
+ if f := pkg.Func("TestMain"); f != nil {
+ sig := f.Type().(*types.Signature)
+ starM := mainStart.Signature.Results().At(0).Type() // *testing.M
+ if sig.Results().Len() == 0 &&
+ sig.Params().Len() == 1 &&
+ types.Identical(sig.Params().At(0).Type(), starM) {
+ main = f
+ }
+ }
+ }
+
+ // TODO(adonovan): use a stable order, e.g. lexical.
+ for _, mem := range pkg.Members {
+ if f, ok := mem.(*Function); ok &&
+ ast.IsExported(f.Name()) &&
+ strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") {
+
+ switch {
+ case testSig != nil && isTestSig(f, "Test", testSig):
+ tests = append(tests, f)
+ case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig):
+ benchmarks = append(benchmarks, f)
+ case isTestSig(f, "Example", exampleSig):
+ examples = append(examples, f)
+ default:
+ continue
+ }
+ }
+ }
+ return
+}
+
+// Like isTest, but checks the signature too.
+func isTestSig(f *Function, prefix string, sig *types.Signature) bool {
+ return isTest(f.Name(), prefix) && types.Identical(f.Signature, sig)
+}
+
+// Given the type of one of the three slice parameters of testing.Main,
+// returns the function type.
+func funcField(slice types.Type) *types.Signature {
+ return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature)
+}
+
+// isTest tells whether name looks like a test (or benchmark, according to prefix).
+// It is a Test (say) if there is a character after Test that is not a lower-case letter.
+// We don't want TesticularCancer.
+// Plundered from $GOROOT/src/cmd/go/test.go
+func isTest(name, prefix string) bool {
+ if !strings.HasPrefix(name, prefix) {
+ return false
+ }
+ if len(name) == len(prefix) { // "Test" is ok
+ return true
+ }
+ return ast.IsExported(name[len(prefix):])
+}
+
+// CreateTestMainPackage creates and returns a synthetic "testmain"
+// package for the specified package if it defines tests, benchmarks or
+// executable examples, or nil otherwise. The new package is named
+// "main" and provides a function named "main" that runs the tests,
+// similar to the one that would be created by the 'go test' tool.
+//
+// Subsequent calls to prog.AllPackages include the new package.
+// The package pkg must belong to the program prog.
+func (prog *Program) CreateTestMainPackage(pkg *Package) *Package {
+ if pkg.Prog != prog {
+ log.Fatal("Package does not belong to Program")
+ }
+
+ // Template data
+ var data struct {
+ Pkg *Package
+ Tests, Benchmarks, Examples []*Function
+ Main *Function
+ Go18 bool
+ }
+ data.Pkg = pkg
+
+ // Enumerate tests.
+ data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg)
+ if data.Main == nil &&
+ data.Tests == nil && data.Benchmarks == nil && data.Examples == nil {
+ return nil
+ }
+
+ // Synthesize source for testmain package.
+ path := pkg.Pkg.Path() + "$testmain"
+ tmpl := testmainTmpl
+ if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
+ // In Go 1.8, testing.MainStart's first argument is an interface, not a func.
+ data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type())
+ } else {
+ // The program does not import "testing", but FindTests
+ // returned non-nil, which must mean there were Examples
+ // but no Test, Benchmark, or TestMain functions.
+
+ // We'll simply call them from testmain.main; this will
+ // ensure they don't panic, but will not check any
+ // "Output:" comments.
+ // (We should not execute an Example that has no
+ // "Output:" comment, but it's impossible to tell here.)
+ tmpl = examplesOnlyTmpl
+ }
+ var buf bytes.Buffer
+ if err := tmpl.Execute(&buf, data); err != nil {
+ log.Fatalf("internal error expanding template for %s: %v", path, err)
+ }
+ if false { // debugging
+ fmt.Fprintln(os.Stderr, buf.String())
+ }
+
+ // Parse and type-check the testmain package.
+ f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0))
+ if err != nil {
+ log.Fatalf("internal error parsing %s: %v", path, err)
+ }
+ conf := types.Config{
+ DisableUnusedImportCheck: true,
+ Importer: importer{pkg},
+ }
+ files := []*ast.File{f}
+ info := &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+ testmainPkg, err := conf.Check(path, prog.Fset, files, info)
+ if err != nil {
+ log.Fatalf("internal error type-checking %s: %v", path, err)
+ }
+
+ // Create and build SSA code.
+ testmain := prog.CreatePackage(testmainPkg, files, info, false)
+ testmain.SetDebugMode(false)
+ testmain.Build()
+ testmain.Func("main").Synthetic = "test main function"
+ testmain.Func("init").Synthetic = "package initializer"
+ return testmain
+}
+
+// An implementation of types.Importer for an already loaded SSA program.
+type importer struct {
+ pkg *Package // package under test; may be non-importable
+}
+
+func (imp importer) Import(path string) (*types.Package, error) {
+ if p := imp.pkg.Prog.ImportedPackage(path); p != nil {
+ return p.Pkg, nil
+ }
+ if path == imp.pkg.Pkg.Path() {
+ return imp.pkg.Pkg, nil
+ }
+ return nil, fmt.Errorf("not found") // can't happen
+}
+
+var testmainTmpl = template.Must(template.New("testmain").Parse(`
+package main
+
+import "io"
+import "os"
+import "testing"
+import p {{printf "%q" .Pkg.Pkg.Path}}
+
+{{if .Go18}}
+type deps struct{}
+
+func (deps) ImportPath() string { return "" }
+func (deps) MatchString(pat, str string) (bool, error) { return true, nil }
+func (deps) StartCPUProfile(io.Writer) error { return nil }
+func (deps) StartTestLog(io.Writer) {}
+func (deps) StopCPUProfile() {}
+func (deps) StopTestLog() error { return nil }
+func (deps) WriteHeapProfile(io.Writer) error { return nil }
+func (deps) WriteProfileTo(string, io.Writer, int) error { return nil }
+
+var match deps
+{{else}}
+func match(_, _ string) (bool, error) { return true, nil }
+{{end}}
+
+func main() {
+ tests := []testing.InternalTest{
+{{range .Tests}}
+ { {{printf "%q" .Name}}, p.{{.Name}} },
+{{end}}
+ }
+ benchmarks := []testing.InternalBenchmark{
+{{range .Benchmarks}}
+ { {{printf "%q" .Name}}, p.{{.Name}} },
+{{end}}
+ }
+ examples := []testing.InternalExample{
+{{range .Examples}}
+ {Name: {{printf "%q" .Name}}, F: p.{{.Name}}},
+{{end}}
+ }
+ m := testing.MainStart(match, tests, benchmarks, examples)
+{{with .Main}}
+ p.{{.Name}}(m)
+{{else}}
+ os.Exit(m.Run())
+{{end}}
+}
+
+`))
+
+var examplesOnlyTmpl = template.Must(template.New("examples").Parse(`
+package main
+
+import p {{printf "%q" .Pkg.Pkg.Path}}
+
+func main() {
+{{range .Examples}}
+ p.{{.Name}}()
+{{end}}
+}
+`))
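
A hedged sketch (editorial, not part of the vendored file) of how FindTests and CreateTestMainPackage might be driven; the inline "foo" package and its single test are invented for illustration.

package main

import (
	"fmt"

	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

const src = `package foo

import "testing"

func TestFoo(t *testing.T) {}
`

func main() {
	var conf loader.Config
	f, err := conf.ParseFile("foo_test.go", src)
	if err != nil {
		panic(err)
	}
	conf.CreateFromFiles("foo", f)

	iprog, err := conf.Load()
	if err != nil {
		panic(err)
	}

	prog := ssautil.CreateProgram(iprog, ssa.SanityCheckFunctions)
	prog.Build()
	pkg := prog.Package(iprog.Created[0].Pkg)

	tests, benchmarks, examples, mainFn := ssa.FindTests(pkg)
	fmt.Printf("tests=%d benchmarks=%d examples=%d TestMain=%v\n",
		len(tests), len(benchmarks), len(examples), mainFn != nil)

	// Synthesize the "foo$testmain" package; a nil result means nothing to run.
	if testmain := prog.CreateTestMainPackage(pkg); testmain != nil {
		fmt.Println("created", testmain.Pkg.Path())
	}
}
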
diff --git a/vendor/golang.org/x/tools/go/ssa/testmain_test.go b/vendor/golang.org/x/tools/go/ssa/testmain_test.go
new file mode 100644
index 0000000..e24b23b
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/testmain_test.go
@@ -0,0 +1,124 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa_test
+
+// Tests of FindTests. CreateTestMainPackage is tested via the interpreter.
+// TODO(adonovan): test the 'pkgs' result from FindTests.
+
+import (
+ "fmt"
+ "sort"
+ "testing"
+
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+func create(t *testing.T, content string) *ssa.Package {
+ var conf loader.Config
+ f, err := conf.ParseFile("foo_test.go", content)
+ if err != nil {
+ t.Fatal(err)
+ }
+ conf.CreateFromFiles("foo", f)
+
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // We needn't call Build.
+ foo := lprog.Package("foo").Pkg
+ return ssautil.CreateProgram(lprog, ssa.SanityCheckFunctions).Package(foo)
+}
+
+func TestFindTests(t *testing.T) {
+ test := `
+package foo
+
+import "testing"
+
+type T int
+
+// Tests:
+func Test(t *testing.T) {}
+func TestA(t *testing.T) {}
+func TestB(t *testing.T) {}
+
+// Not tests:
+func testC(t *testing.T) {}
+func TestD() {}
+func testE(t *testing.T) int { return 0 }
+func (T) Test(t *testing.T) {}
+
+// Benchmarks:
+func Benchmark(*testing.B) {}
+func BenchmarkA(b *testing.B) {}
+func BenchmarkB(*testing.B) {}
+
+// Not benchmarks:
+func benchmarkC(t *testing.T) {}
+func BenchmarkD() {}
+func benchmarkE(t *testing.T) int { return 0 }
+func (T) Benchmark(t *testing.T) {}
+
+// Examples:
+func Example() {}
+func ExampleA() {}
+
+// Not examples:
+func exampleC() {}
+func ExampleD(t *testing.T) {}
+func exampleE() int { return 0 }
+func (T) Example() {}
+`
+ pkg := create(t, test)
+ tests, benchmarks, examples, _ := ssa.FindTests(pkg)
+
+ sort.Sort(funcsByPos(tests))
+ if got, want := fmt.Sprint(tests), "[foo.Test foo.TestA foo.TestB]"; got != want {
+ t.Errorf("FindTests.tests = %s, want %s", got, want)
+ }
+
+ sort.Sort(funcsByPos(benchmarks))
+ if got, want := fmt.Sprint(benchmarks), "[foo.Benchmark foo.BenchmarkA foo.BenchmarkB]"; got != want {
+ t.Errorf("FindTests.benchmarks = %s, want %s", got, want)
+ }
+
+ sort.Sort(funcsByPos(examples))
+ if got, want := fmt.Sprint(examples), "[foo.Example foo.ExampleA]"; got != want {
+ t.Errorf("FindTests examples = %s, want %s", got, want)
+ }
+}
+
+func TestFindTestsTesting(t *testing.T) {
+ test := `
+package foo
+
+// foo does not import "testing", but defines Examples.
+
+func Example() {}
+func ExampleA() {}
+`
+ pkg := create(t, test)
+ tests, benchmarks, examples, _ := ssa.FindTests(pkg)
+ if len(tests) > 0 {
+ t.Errorf("FindTests.tests = %s, want none", tests)
+ }
+ if len(benchmarks) > 0 {
+ t.Errorf("FindTests.benchmarks = %s, want none", benchmarks)
+ }
+ sort.Sort(funcsByPos(examples))
+ if got, want := fmt.Sprint(examples), "[foo.Example foo.ExampleA]"; got != want {
+ t.Errorf("FindTests examples = %s, want %s", got, want)
+ }
+}
+
+type funcsByPos []*ssa.Function
+
+func (p funcsByPos) Len() int { return len(p) }
+func (p funcsByPos) Less(i, j int) bool { return p[i].Pos() < p[j].Pos() }
+func (p funcsByPos) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
diff --git a/vendor/golang.org/x/tools/go/ssa/util.go b/vendor/golang.org/x/tools/go/ssa/util.go
new file mode 100644
index 0000000..ddb1184
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/util.go
@@ -0,0 +1,119 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines a number of miscellaneous utility functions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "io"
+ "os"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+//// AST utilities
+
+func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }
+
+// isBlankIdent returns true iff e is an Ident with name "_".
+// They have no associated types.Object, and thus no type.
+//
+func isBlankIdent(e ast.Expr) bool {
+ id, ok := e.(*ast.Ident)
+ return ok && id.Name == "_"
+}
+
+//// Type utilities. Some of these belong in go/types.
+
+// isPointer returns true for types whose underlying type is a pointer.
+func isPointer(typ types.Type) bool {
+ _, ok := typ.Underlying().(*types.Pointer)
+ return ok
+}
+
+func isInterface(T types.Type) bool { return types.IsInterface(T) }
+
+// deref returns a pointer's element type; otherwise it returns typ.
+func deref(typ types.Type) types.Type {
+ if p, ok := typ.Underlying().(*types.Pointer); ok {
+ return p.Elem()
+ }
+ return typ
+}
+
+// recvType returns the receiver type of method obj.
+func recvType(obj *types.Func) types.Type {
+ return obj.Type().(*types.Signature).Recv().Type()
+}
+
+// DefaultType returns the default "typed" type for an "untyped" type;
+// it returns the incoming type for all other types. The default type
+// for untyped nil is untyped nil.
+//
+// Exported to ssa/interp.
+//
+// TODO(adonovan): use go/types.DefaultType after 1.8.
+//
+func DefaultType(typ types.Type) types.Type {
+ if t, ok := typ.(*types.Basic); ok {
+ k := t.Kind()
+ switch k {
+ case types.UntypedBool:
+ k = types.Bool
+ case types.UntypedInt:
+ k = types.Int
+ case types.UntypedRune:
+ k = types.Rune
+ case types.UntypedFloat:
+ k = types.Float64
+ case types.UntypedComplex:
+ k = types.Complex128
+ case types.UntypedString:
+ k = types.String
+ }
+ typ = types.Typ[k]
+ }
+ return typ
+}
+
+// logStack prints the formatted "start" message to stderr and
+// returns a closure that prints the corresponding "end" message.
+// Call using 'defer logStack(...)()' to show builder stack on panic.
+// Don't forget trailing parens!
+//
+func logStack(format string, args ...interface{}) func() {
+ msg := fmt.Sprintf(format, args...)
+ io.WriteString(os.Stderr, msg)
+ io.WriteString(os.Stderr, "\n")
+ return func() {
+ io.WriteString(os.Stderr, msg)
+ io.WriteString(os.Stderr, " end\n")
+ }
+}
+
+// newVar creates a 'var' for use in a types.Tuple.
+func newVar(name string, typ types.Type) *types.Var {
+ return types.NewParam(token.NoPos, nil, name, typ)
+}
+
+// anonVar creates an anonymous 'var' for use in a types.Tuple.
+func anonVar(typ types.Type) *types.Var {
+ return newVar("", typ)
+}
+
+var lenResults = types.NewTuple(anonVar(tInt))
+
+// makeLen returns the len builtin specialized to type func(T)int.
+func makeLen(T types.Type) *Builtin {
+ lenParams := types.NewTuple(anonVar(T))
+ return &Builtin{
+ name: "len",
+ sig: types.NewSignature(nil, lenParams, lenResults, false),
+ }
+}
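
A small illustrative sketch (editorial): DefaultType maps the untyped constant kinds to their default typed counterparts and returns every other type unchanged.

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/ssa"
)

func main() {
	fmt.Println(ssa.DefaultType(types.Typ[types.UntypedInt]))    // int
	fmt.Println(ssa.DefaultType(types.Typ[types.UntypedString])) // string
	fmt.Println(ssa.DefaultType(types.Typ[types.Bool]))          // bool, already typed
}
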
diff --git a/vendor/golang.org/x/tools/go/ssa/wrappers.go b/vendor/golang.org/x/tools/go/ssa/wrappers.go
new file mode 100644
index 0000000..701dd90
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/wrappers.go
@@ -0,0 +1,294 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines synthesis of Functions that delegate to declared
+// methods; they come in three kinds:
+//
+// (1) wrappers: methods that wrap declared methods, performing
+// implicit pointer indirections and embedded field selections.
+//
+// (2) thunks: funcs that wrap declared methods. Like wrappers,
+// thunks perform indirections and field selections. The thunk's
+// first parameter is used as the receiver for the method call.
+//
+// (3) bounds: funcs that wrap declared methods. The bound's sole
+// free variable, supplied by a closure, is used as the receiver
+// for the method call. No indirections or field selections are
+// performed since they can be done before the call.
+
+import (
+ "fmt"
+
+ "go/types"
+)
+
+// -- wrappers -----------------------------------------------------------
+
+// makeWrapper returns a synthetic method that delegates to the
+// declared method denoted by meth.Obj(), first performing any
+// necessary pointer indirections or field selections implied by meth.
+//
+// The resulting method's receiver type is meth.Recv().
+//
+// This function is versatile but quite subtle! Consider the
+// following axes of variation when making changes:
+// - optional receiver indirection
+// - optional implicit field selections
+// - meth.Obj() may denote a concrete or an interface method
+// - the result may be a thunk or a wrapper.
+//
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
+//
+func makeWrapper(prog *Program, sel *types.Selection) *Function {
+ obj := sel.Obj().(*types.Func) // the declared function
+ sig := sel.Type().(*types.Signature) // type of this wrapper
+
+ var recv *types.Var // wrapper's receiver or thunk's params[0]
+ name := obj.Name()
+ var description string
+ var start int // first regular param
+ if sel.Kind() == types.MethodExpr {
+ name += "$thunk"
+ description = "thunk"
+ recv = sig.Params().At(0)
+ start = 1
+ } else {
+ description = "wrapper"
+ recv = sig.Recv()
+ }
+
+ description = fmt.Sprintf("%s for %s", description, sel.Obj())
+ if prog.mode&LogSource != 0 {
+ defer logStack("make %s to (%s)", description, recv.Type())()
+ }
+ fn := &Function{
+ name: name,
+ method: sel,
+ object: obj,
+ Signature: sig,
+ Synthetic: description,
+ Prog: prog,
+ pos: obj.Pos(),
+ }
+ fn.startBody()
+ fn.addSpilledParam(recv)
+ createParams(fn, start)
+
+ indices := sel.Index()
+
+ var v Value = fn.Locals[0] // spilled receiver
+ if isPointer(sel.Recv()) {
+ v = emitLoad(fn, v)
+
+ // For simple indirection wrappers, perform an informative nil-check:
+ // "value method (T).f called using nil *T pointer"
+ if len(indices) == 1 && !isPointer(recvType(obj)) {
+ var c Call
+ c.Call.Value = &Builtin{
+ name: "ssa:wrapnilchk",
+ sig: types.NewSignature(nil,
+ types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)),
+ types.NewTuple(anonVar(sel.Recv())), false),
+ }
+ c.Call.Args = []Value{
+ v,
+ stringConst(deref(sel.Recv()).String()),
+ stringConst(sel.Obj().Name()),
+ }
+ c.setType(v.Type())
+ v = fn.emit(&c)
+ }
+ }
+
+ // Invariant: v is a pointer, either
+ // value of *A receiver param, or
+ // address of A spilled receiver.
+
+ // We use pointer arithmetic (FieldAddr possibly followed by
+ // Load) in preference to value extraction (Field possibly
+ // preceded by Load).
+
+ v = emitImplicitSelections(fn, v, indices[:len(indices)-1])
+
+ // Invariant: v is a pointer, either
+ // value of implicit *C field, or
+ // address of implicit C field.
+
+ var c Call
+ if r := recvType(obj); !isInterface(r) { // concrete method
+ if !isPointer(r) {
+ v = emitLoad(fn, v)
+ }
+ c.Call.Value = prog.declaredFunc(obj)
+ c.Call.Args = append(c.Call.Args, v)
+ } else {
+ c.Call.Method = obj
+ c.Call.Value = emitLoad(fn, v)
+ }
+ for _, arg := range fn.Params[1:] {
+ c.Call.Args = append(c.Call.Args, arg)
+ }
+ emitTailCall(fn, &c)
+ fn.finishBody()
+ return fn
+}
+
+// createParams creates parameters for wrapper method fn based on its
+// Signature.Params, which do not include the receiver.
+// start is the index of the first regular parameter to use.
+//
+func createParams(fn *Function, start int) {
+ var last *Parameter
+ tparams := fn.Signature.Params()
+ for i, n := start, tparams.Len(); i < n; i++ {
+ last = fn.addParamObj(tparams.At(i))
+ }
+ if fn.Signature.Variadic() {
+ last.typ = types.NewSlice(last.typ)
+ }
+}
+
+// -- bounds -----------------------------------------------------------
+
+// makeBound returns a bound method wrapper (or "bound"), a synthetic
+// function that delegates to a concrete or interface method denoted
+// by obj. The resulting function has no receiver, but has one free
+// variable which will be used as the method's receiver in the
+// tail-call.
+//
+// Use MakeClosure with such a wrapper to construct a bound method
+// closure. e.g.:
+//
+// type T int or: type T interface { meth() }
+// func (t T) meth()
+// var t T
+// f := t.meth
+// f() // calls t.meth()
+//
+// f is a closure of a synthetic wrapper defined as if by:
+//
+// f := func() { return t.meth() }
+//
+// Unlike makeWrapper, makeBound need perform no indirection or field
+// selections because that can be done before the closure is
+// constructed.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu)
+//
+func makeBound(prog *Program, obj *types.Func) *Function {
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+ fn, ok := prog.bounds[obj]
+ if !ok {
+ description := fmt.Sprintf("bound method wrapper for %s", obj)
+ if prog.mode&LogSource != 0 {
+ defer logStack("%s", description)()
+ }
+ fn = &Function{
+ name: obj.Name() + "$bound",
+ object: obj,
+ Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver
+ Synthetic: description,
+ Prog: prog,
+ pos: obj.Pos(),
+ }
+
+ fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn}
+ fn.FreeVars = []*FreeVar{fv}
+ fn.startBody()
+ createParams(fn, 0)
+ var c Call
+
+ if !isInterface(recvType(obj)) { // concrete
+ c.Call.Value = prog.declaredFunc(obj)
+ c.Call.Args = []Value{fv}
+ } else {
+ c.Call.Value = fv
+ c.Call.Method = obj
+ }
+ for _, arg := range fn.Params {
+ c.Call.Args = append(c.Call.Args, arg)
+ }
+ emitTailCall(fn, &c)
+ fn.finishBody()
+
+ prog.bounds[obj] = fn
+ }
+ return fn
+}
+
+// -- thunks -----------------------------------------------------------
+
+// makeThunk returns a thunk, a synthetic function that delegates to a
+// concrete or interface method denoted by sel.Obj(). The resulting
+// function has no receiver, but has an additional (first) regular
+// parameter.
+//
+// Precondition: sel.Kind() == types.MethodExpr.
+//
+// type T int or: type T interface { meth() }
+// func (t T) meth()
+// f := T.meth
+// var t T
+// f(t) // calls t.meth()
+//
+// f is a synthetic wrapper defined as if by:
+//
+// f := func(t T) { return t.meth() }
+//
+// TODO(adonovan): opt: currently the stub is created even when used
+// directly in a function call: C.f(i, 0). This is less efficient
+// than inlining the stub.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu)
+//
+func makeThunk(prog *Program, sel *types.Selection) *Function {
+ if sel.Kind() != types.MethodExpr {
+ panic(sel)
+ }
+
+ key := selectionKey{
+ kind: sel.Kind(),
+ recv: sel.Recv(),
+ obj: sel.Obj(),
+ index: fmt.Sprint(sel.Index()),
+ indirect: sel.Indirect(),
+ }
+
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+
+ // Canonicalize key.recv to avoid constructing duplicate thunks.
+ canonRecv, ok := prog.canon.At(key.recv).(types.Type)
+ if !ok {
+ canonRecv = key.recv
+ prog.canon.Set(key.recv, canonRecv)
+ }
+ key.recv = canonRecv
+
+ fn, ok := prog.thunks[key]
+ if !ok {
+ fn = makeWrapper(prog, sel)
+ if fn.Signature.Recv() != nil {
+ panic(fn) // unexpected receiver
+ }
+ prog.thunks[key] = fn
+ }
+ return fn
+}
+
+func changeRecv(s *types.Signature, recv *types.Var) *types.Signature {
+ return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic())
+}
+
+// selectionKey is like types.Selection but a usable map key.
+type selectionKey struct {
+ kind types.SelectionKind
+ recv types.Type // canonicalized via Program.canon
+ obj types.Object
+ index string
+ indirect bool
+}
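
A source-level illustration (editorial, not vendored code) of the three constructs that the synthetic functions in this file stand behind; type and method names are invented. Whether a wrapper, thunk or bound is actually materialized depends on how the program uses these values, as described in the comments at the top of the file.

package main

type Inner int

// Meth is the declared method that the synthetic functions delegate to.
func (i Inner) Meth() int { return int(i) }

// Outer's method set acquires Meth through the embedded field; satisfying
// an interface with Outer may use a promotion wrapper (kind 1).
type Outer struct{ Inner }

func main() {
	var o Outer
	_ = o.Meth()

	// Method expression: the receiver becomes the first parameter (thunk, kind 2).
	f := Inner.Meth
	_ = f(o.Inner)

	// Method value: the receiver is captured as the closure's free variable (bound, kind 3).
	i := Inner(7)
	g := i.Meth
	_ = g()
}
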
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/example_test.go b/vendor/golang.org/x/tools/go/types/typeutil/example_test.go
new file mode 100644
index 0000000..86c4d44
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/example_test.go
@@ -0,0 +1,67 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil_test
+
+import (
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "sort"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+func ExampleMap() {
+ const source = `package P
+
+var X []string
+var Y []string
+
+const p, q = 1.0, 2.0
+
+func f(offset int32) (value byte, ok bool)
+func g(rune) (uint8, bool)
+`
+
+ // Parse and type-check the package.
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "P.go", source, 0)
+ if err != nil {
+ panic(err)
+ }
+ pkg, err := new(types.Config).Check("P", fset, []*ast.File{f}, nil)
+ if err != nil {
+ panic(err)
+ }
+
+ scope := pkg.Scope()
+
+ // Group names of package-level objects by their type.
+ var namesByType typeutil.Map // value is []string
+ for _, name := range scope.Names() {
+ T := scope.Lookup(name).Type()
+
+ names, _ := namesByType.At(T).([]string)
+ names = append(names, name)
+ namesByType.Set(T, names)
+ }
+
+ // Format, sort, and print the map entries.
+ var lines []string
+ namesByType.Iterate(func(T types.Type, names interface{}) {
+ lines = append(lines, fmt.Sprintf("%s %s", names, T))
+ })
+ sort.Strings(lines)
+ for _, line := range lines {
+ fmt.Println(line)
+ }
+
+ // Output:
+ // [X Y] []string
+ // [f g] func(offset int32) (value byte, ok bool)
+ // [p q] untyped float
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/imports.go b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
new file mode 100644
index 0000000..9c441db
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
@@ -0,0 +1,31 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import "go/types"
+
+// Dependencies returns all dependencies of the specified packages.
+//
+// Dependent packages appear in topological order: if package P imports
+// package Q, Q appears earlier than P in the result.
+// The algorithm follows import statements in the order they
+// appear in the source code, so the result is a total order.
+//
+func Dependencies(pkgs ...*types.Package) []*types.Package {
+ var result []*types.Package
+ seen := make(map[*types.Package]bool)
+ var visit func(pkgs []*types.Package)
+ visit = func(pkgs []*types.Package) {
+ for _, p := range pkgs {
+ if !seen[p] {
+ seen[p] = true
+ visit(p.Imports())
+ result = append(result, p)
+ }
+ }
+ }
+ visit(pkgs)
+ return result
+}
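
A minimal sketch (editorial) of Dependencies in use. It assumes importer.Default() can supply export data for the standard-library packages imported here.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	const src = `package p; import (_ "errors"; _ "fmt")`

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	// Dependencies of "errors" and "fmt" print before them, and "p" prints last.
	for _, dep := range typeutil.Dependencies(pkg) {
		fmt.Println(dep.Path())
	}
}
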
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/imports_test.go b/vendor/golang.org/x/tools/go/types/typeutil/imports_test.go
new file mode 100644
index 0000000..c8ef6d6
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/imports_test.go
@@ -0,0 +1,80 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil_test
+
+import (
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "testing"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+type closure map[string]*types.Package
+
+func (c closure) Import(path string) (*types.Package, error) { return c[path], nil }
+
+func TestDependencies(t *testing.T) {
+ packages := make(map[string]*types.Package)
+ conf := types.Config{
+ Importer: closure(packages),
+ }
+ fset := token.NewFileSet()
+
+ // All edges go to the right.
+ // /--D--B--A
+ // F \_C_/
+ // \__E_/
+ for i, content := range []string{
+ `package a`,
+ `package c; import (_ "a")`,
+ `package b; import (_ "a")`,
+ `package e; import (_ "c")`,
+ `package d; import (_ "b"; _ "c")`,
+ `package f; import (_ "d"; _ "e")`,
+ } {
+ f, err := parser.ParseFile(fset, fmt.Sprintf("%d.go", i), content, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ pkg, err := conf.Check(f.Name.Name, fset, []*ast.File{f}, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ packages[pkg.Path()] = pkg
+ }
+
+ for _, test := range []struct {
+ roots, want string
+ }{
+ {"a", "a"},
+ {"b", "ab"},
+ {"c", "ac"},
+ {"d", "abcd"},
+ {"e", "ace"},
+ {"f", "abcdef"},
+
+ {"be", "abce"},
+ {"eb", "aceb"},
+ {"de", "abcde"},
+ {"ed", "acebd"},
+ {"ef", "acebdf"},
+ } {
+ var pkgs []*types.Package
+ for _, r := range test.roots {
+ pkgs = append(pkgs, packages[string(r)])
+ }
+ var got string
+ for _, p := range typeutil.Dependencies(pkgs...) {
+ got += p.Path()
+ }
+ if got != test.want {
+ t.Errorf("Dependencies(%q) = %q, want %q", test.roots, got, test.want)
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go
new file mode 100644
index 0000000..c7f7545
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go
@@ -0,0 +1,313 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package typeutil defines various utilities for types, such as Map,
+// a mapping from types.Type to interface{} values.
+package typeutil // import "golang.org/x/tools/go/types/typeutil"
+
+import (
+ "bytes"
+ "fmt"
+ "go/types"
+ "reflect"
+)
+
+// Map is a hash-table-based mapping from types (types.Type) to
+// arbitrary interface{} values. The concrete types that implement
+// the Type interface are pointers. Since they are not canonicalized,
+// == cannot be used to check for equivalence, and thus we cannot
+// simply use a Go map.
+//
+// Just as with map[K]V, a nil *Map is a valid empty map.
+//
+// Not thread-safe.
+//
+type Map struct {
+ hasher Hasher // shared by many Maps
+ table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
+ length int // number of map entries
+}
+
+// entry is an entry (key/value association) in a hash bucket.
+type entry struct {
+ key types.Type
+ value interface{}
+}
+
+// SetHasher sets the hasher used by Map.
+//
+// All Hashers are functionally equivalent but contain internal state
+// used to cache the results of hashing previously seen types.
+//
+// A single Hasher created by MakeHasher() may be shared among many
+// Maps. This is recommended if the instances have many keys in
+// common, as it will amortize the cost of hash computation.
+//
+// A Hasher may grow without bound as new types are seen. Even when a
+// type is deleted from the map, the Hasher never shrinks, since other
+// types in the map may reference the deleted type indirectly.
+//
+ // Hashers are not thread-safe, and read-only operations such as
+ // Map.At require updates to the hasher, so a full Mutex lock (not a
+ // read-lock) is required around all Map operations if a shared
+ // hasher is accessed from multiple threads.
+//
+// If SetHasher is not called, the Map will create a private hasher at
+// the first call to Insert.
+//
+func (m *Map) SetHasher(hasher Hasher) {
+ m.hasher = hasher
+}
+
+// Delete removes the entry with the given key, if any.
+// It returns true if the entry was found.
+//
+func (m *Map) Delete(key types.Type) bool {
+ if m != nil && m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ for i, e := range bucket {
+ if e.key != nil && types.Identical(key, e.key) {
+ // We can't compact the bucket as it
+ // would disturb iterators.
+ bucket[i] = entry{}
+ m.length--
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// At returns the map entry for the given key.
+// The result is nil if the entry is not present.
+//
+func (m *Map) At(key types.Type) interface{} {
+ if m != nil && m.table != nil {
+ for _, e := range m.table[m.hasher.Hash(key)] {
+ if e.key != nil && types.Identical(key, e.key) {
+ return e.value
+ }
+ }
+ }
+ return nil
+}
+
+ // Set sets the map entry for key to value,
+ // and returns the previous value, if any.
+func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) {
+ if m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ var hole *entry
+ for i, e := range bucket {
+ if e.key == nil {
+ hole = &bucket[i]
+ } else if types.Identical(key, e.key) {
+ prev = e.value
+ bucket[i].value = value
+ return
+ }
+ }
+
+ if hole != nil {
+ *hole = entry{key, value} // overwrite deleted entry
+ } else {
+ m.table[hash] = append(bucket, entry{key, value})
+ }
+ } else {
+ if m.hasher.memo == nil {
+ m.hasher = MakeHasher()
+ }
+ hash := m.hasher.Hash(key)
+ m.table = map[uint32][]entry{hash: {entry{key, value}}}
+ }
+
+ m.length++
+ return
+}
+
+// Len returns the number of map entries.
+func (m *Map) Len() int {
+ if m != nil {
+ return m.length
+ }
+ return 0
+}
+
+// Iterate calls function f on each entry in the map in unspecified order.
+//
+// If f should mutate the map, Iterate provides the same guarantees as
+// Go maps: if f deletes a map entry that Iterate has not yet reached,
+// f will not be invoked for it, but if f inserts a map entry that
+// Iterate has not yet reached, whether or not f will be invoked for
+// it is unspecified.
+//
+func (m *Map) Iterate(f func(key types.Type, value interface{})) {
+ if m != nil {
+ for _, bucket := range m.table {
+ for _, e := range bucket {
+ if e.key != nil {
+ f(e.key, e.value)
+ }
+ }
+ }
+ }
+}
+
+// Keys returns a new slice containing the set of map keys.
+// The order is unspecified.
+func (m *Map) Keys() []types.Type {
+ keys := make([]types.Type, 0, m.Len())
+ m.Iterate(func(key types.Type, _ interface{}) {
+ keys = append(keys, key)
+ })
+ return keys
+}
+
+func (m *Map) toString(values bool) string {
+ if m == nil {
+ return "{}"
+ }
+ var buf bytes.Buffer
+ fmt.Fprint(&buf, "{")
+ sep := ""
+ m.Iterate(func(key types.Type, value interface{}) {
+ fmt.Fprint(&buf, sep)
+ sep = ", "
+ fmt.Fprint(&buf, key)
+ if values {
+ fmt.Fprintf(&buf, ": %q", value)
+ }
+ })
+ fmt.Fprint(&buf, "}")
+ return buf.String()
+}
+
+// String returns a string representation of the map's entries.
+// Values are printed using fmt.Sprintf("%v", v).
+// Order is unspecified.
+//
+func (m *Map) String() string {
+ return m.toString(true)
+}
+
+// KeysString returns a string representation of the map's key set.
+// Order is unspecified.
+//
+func (m *Map) KeysString() string {
+ return m.toString(false)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Hasher
+
+// A Hasher maps each type to its hash value.
+// For efficiency, a hasher uses memoization; thus its memory
+// footprint grows monotonically over time.
+// Hashers are not thread-safe.
+// Hashers have reference semantics.
+// Call MakeHasher to create a Hasher.
+type Hasher struct {
+ memo map[types.Type]uint32
+}
+
+// MakeHasher returns a new Hasher instance.
+func MakeHasher() Hasher {
+ return Hasher{make(map[types.Type]uint32)}
+}
+
+// Hash computes a hash value for the given type t such that
+// Identical(t, t') => Hash(t) == Hash(t').
+func (h Hasher) Hash(t types.Type) uint32 {
+ hash, ok := h.memo[t]
+ if !ok {
+ hash = h.hashFor(t)
+ h.memo[t] = hash
+ }
+ return hash
+}
+
+// hashString computes the Fowler–Noll–Vo hash of s.
+func hashString(s string) uint32 {
+ var h uint32
+ for i := 0; i < len(s); i++ {
+ h ^= uint32(s[i])
+ h *= 16777619
+ }
+ return h
+}
+
+// hashFor computes the hash of t.
+func (h Hasher) hashFor(t types.Type) uint32 {
+ // See Identical for rationale.
+ switch t := t.(type) {
+ case *types.Basic:
+ return uint32(t.Kind())
+
+ case *types.Array:
+ return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())
+
+ case *types.Slice:
+ return 9049 + 2*h.Hash(t.Elem())
+
+ case *types.Struct:
+ var hash uint32 = 9059
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ f := t.Field(i)
+ if f.Anonymous() {
+ hash += 8861
+ }
+ hash += hashString(t.Tag(i))
+ hash += hashString(f.Name()) // (ignore f.Pkg)
+ hash += h.Hash(f.Type())
+ }
+ return hash
+
+ case *types.Pointer:
+ return 9067 + 2*h.Hash(t.Elem())
+
+ case *types.Signature:
+ var hash uint32 = 9091
+ if t.Variadic() {
+ hash *= 8863
+ }
+ return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())
+
+ case *types.Interface:
+ var hash uint32 = 9103
+ for i, n := 0, t.NumMethods(); i < n; i++ {
+ // See go/types.identicalMethods for rationale.
+ // Method order is not significant.
+ // Ignore m.Pkg().
+ m := t.Method(i)
+ hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
+ }
+ return hash
+
+ case *types.Map:
+ return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())
+
+ case *types.Chan:
+ return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())
+
+ case *types.Named:
+ // Not safe with a copying GC; objects may move.
+ return uint32(reflect.ValueOf(t.Obj()).Pointer())
+
+ case *types.Tuple:
+ return h.hashTuple(t)
+ }
+ panic(t)
+}
+
+func (h Hasher) hashTuple(tuple *types.Tuple) uint32 {
+ // See go/types.identicalTypes for rationale.
+ n := tuple.Len()
+ var hash uint32 = 9137 + 2*uint32(n)
+ for i := 0; i < n; i++ {
+ hash += 3 * h.Hash(tuple.At(i).Type())
+ }
+ return hash
+}
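As a quick illustration of why Map exists (its doc comment notes that types.Type values are not canonicalized, so == cannot be used), a minimal sketch; the two pointer types below are hypothetical:

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	// Two distinct *types.Pointer values that are structurally identical.
	p1 := types.NewPointer(types.Typ[types.String]) // *string
	p2 := types.NewPointer(types.Typ[types.String]) // *string, a different object

	var m typeutil.Map // zero value is ready to use
	m.Set(p1, "pointer to string")

	// A plain map[types.Type]T keyed by p1 would miss p2 (p1 != p2),
	// but Map compares keys with types.Identical.
	fmt.Println(m.At(p2)) // "pointer to string"
	fmt.Println(p1 == p2) // false
}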
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map_test.go b/vendor/golang.org/x/tools/go/types/typeutil/map_test.go
new file mode 100644
index 0000000..34facbe
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/map_test.go
@@ -0,0 +1,174 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil_test
+
+// TODO(adonovan):
+// - test use of explicit hasher across two maps.
+// - test hashcodes are consistent with equals for a range of types
+// (e.g. all types generated by type-checking some body of real code).
+
+import (
+ "go/types"
+ "testing"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+var (
+ tStr = types.Typ[types.String] // string
+ tPStr1 = types.NewPointer(tStr) // *string
+ tPStr2 = types.NewPointer(tStr) // *string, again
+ tInt = types.Typ[types.Int] // int
+ tChanInt1 = types.NewChan(types.RecvOnly, tInt) // <-chan int
+ tChanInt2 = types.NewChan(types.RecvOnly, tInt) // <-chan int, again
+)
+
+func checkEqualButNotIdentical(t *testing.T, x, y types.Type, comment string) {
+ if !types.Identical(x, y) {
+ t.Errorf("%s: not equal: %s, %s", comment, x, y)
+ }
+ if x == y {
+ t.Errorf("%s: identical: %v, %v", comment, x, y)
+ }
+}
+
+func TestAxioms(t *testing.T) {
+ checkEqualButNotIdentical(t, tPStr1, tPStr2, "tPstr{1,2}")
+ checkEqualButNotIdentical(t, tChanInt1, tChanInt2, "tChanInt{1,2}")
+}
+
+func TestMap(t *testing.T) {
+ var tmap *typeutil.Map
+
+ // All methods but Set are safe on (*T)(nil).
+ tmap.Len()
+ tmap.At(tPStr1)
+ tmap.Delete(tPStr1)
+ tmap.KeysString()
+ tmap.String()
+
+ tmap = new(typeutil.Map)
+
+ // Length of empty map.
+ if l := tmap.Len(); l != 0 {
+ t.Errorf("Len() on empty Map: got %d, want 0", l)
+ }
+ // At of missing key.
+ if v := tmap.At(tPStr1); v != nil {
+ t.Errorf("At() on empty Map: got %v, want nil", v)
+ }
+ // Deletion of missing key.
+ if tmap.Delete(tPStr1) {
+ t.Errorf("Delete() on empty Map: got true, want false")
+ }
+ // Set of new key.
+ if prev := tmap.Set(tPStr1, "*string"); prev != nil {
+ t.Errorf("Set() on empty Map returned non-nil previous value %s", prev)
+ }
+
+ // Now: {*string: "*string"}
+
+ // Length of non-empty map.
+ if l := tmap.Len(); l != 1 {
+ t.Errorf("Len(): got %d, want 1", l)
+ }
+ // At via insertion key.
+ if v := tmap.At(tPStr1); v != "*string" {
+ t.Errorf("At(): got %q, want \"*string\"", v)
+ }
+ // At via equal key.
+ if v := tmap.At(tPStr2); v != "*string" {
+ t.Errorf("At(): got %q, want \"*string\"", v)
+ }
+ // Iteration over sole entry.
+ tmap.Iterate(func(key types.Type, value interface{}) {
+ if key != tPStr1 {
+ t.Errorf("Iterate: key: got %s, want %s", key, tPStr1)
+ }
+ if want := "*string"; value != want {
+ t.Errorf("Iterate: value: got %s, want %s", value, want)
+ }
+ })
+
+ // Insertion with key equal to present one.
+ if prev := tmap.Set(tPStr2, "*string again"); prev != "*string" {
+ t.Errorf("Set() previous value: got %s, want \"*string\"", prev)
+ }
+
+ // Insertion of another association.
+ if prev := tmap.Set(tChanInt1, "<-chan int"); prev != nil {
+ t.Errorf("Set() previous value: got %s, want nil", prev)
+ }
+
+ // Now: {*string: "*string again", <-chan int: "<-chan int"}
+
+ want1 := "{*string: \"*string again\", <-chan int: \"<-chan int\"}"
+ want2 := "{<-chan int: \"<-chan int\", *string: \"*string again\"}"
+ if s := tmap.String(); s != want1 && s != want2 {
+ t.Errorf("String(): got %s, want %s", s, want1)
+ }
+
+ want1 = "{*string, <-chan int}"
+ want2 = "{<-chan int, *string}"
+ if s := tmap.KeysString(); s != want1 && s != want2 {
+ t.Errorf("KeysString(): got %s, want %s", s, want1)
+ }
+
+ // Keys().
+ I := types.Identical
+ switch k := tmap.Keys(); {
+ case I(k[0], tChanInt1) && I(k[1], tPStr1): // ok
+ case I(k[1], tChanInt1) && I(k[0], tPStr1): // ok
+ default:
+ t.Errorf("Keys(): got %v, want %s", k, want2)
+ }
+
+ if l := tmap.Len(); l != 2 {
+ t.Errorf("Len(): got %d, want 1", l)
+ }
+ // At via original key.
+ if v := tmap.At(tPStr1); v != "*string again" {
+ t.Errorf("At(): got %q, want \"*string again\"", v)
+ }
+ hamming := 1
+ tmap.Iterate(func(key types.Type, value interface{}) {
+ switch {
+ case I(key, tChanInt1):
+ hamming *= 2 // ok
+ case I(key, tPStr1):
+ hamming *= 3 // ok
+ }
+ })
+ if hamming != 6 {
+ t.Errorf("Iterate: hamming: got %d, want %d", hamming, 6)
+ }
+
+ if v := tmap.At(tChanInt2); v != "<-chan int" {
+ t.Errorf("At(): got %q, want \"<-chan int\"", v)
+ }
+ // Deletion with key equal to present one.
+ if !tmap.Delete(tChanInt2) {
+ t.Errorf("Delete() of existing key: got false, want true")
+ }
+
+ // Now: {*string: "*string again"}
+
+ if l := tmap.Len(); l != 1 {
+ t.Errorf("Len(): got %d, want 1", l)
+ }
+ // Deletion again.
+ if !tmap.Delete(tPStr2) {
+ t.Errorf("Delete() of existing key: got false, want true")
+ }
+
+ // Now: {}
+
+ if l := tmap.Len(); l != 0 {
+ t.Errorf("Len(): got %d, want %d", l, 0)
+ }
+ if s := tmap.String(); s != "{}" {
+ t.Errorf("Len(): got %q, want %q", s, "")
+ }
+}
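The SetHasher comment in map.go recommends sharing one Hasher among Maps that have keys in common (and the TODO above notes that this is not yet exercised here); a minimal sketch of that pattern, with hypothetical map names:

package main

import (
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	hasher := typeutil.MakeHasher() // memoizes hashes of types it has seen

	var byParam, byResult typeutil.Map // hypothetical maps with overlapping keys
	byParam.SetHasher(hasher)
	byResult.SetHasher(hasher)

	sig := types.NewSignature(nil, nil, nil, false) // func()
	byParam.Set(sig, 1)
	byResult.Set(sig, 2) // reuses the hash memoized by the first Set
}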
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
new file mode 100644
index 0000000..3208461
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
@@ -0,0 +1,72 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements a cache of method sets.
+
+package typeutil
+
+import (
+ "go/types"
+ "sync"
+)
+
+// A MethodSetCache records the method set of each type T for which
+// MethodSet(T) is called so that repeat queries are fast.
+// The zero value is a ready-to-use cache instance.
+type MethodSetCache struct {
+ mu sync.Mutex
+ named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N
+ others map[types.Type]*types.MethodSet // all other types
+}
+
+// MethodSet returns the method set of type T. It is thread-safe.
+//
+// If cache is nil, this function is equivalent to types.NewMethodSet(T).
+// Utility functions can thus expose an optional *MethodSetCache
+// parameter to clients that care about performance.
+//
+func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet {
+ if cache == nil {
+ return types.NewMethodSet(T)
+ }
+ cache.mu.Lock()
+ defer cache.mu.Unlock()
+
+ switch T := T.(type) {
+ case *types.Named:
+ return cache.lookupNamed(T).value
+
+ case *types.Pointer:
+ if N, ok := T.Elem().(*types.Named); ok {
+ return cache.lookupNamed(N).pointer
+ }
+ }
+
+ // all other types
+ // (The map uses pointer equivalence, not type identity.)
+ mset := cache.others[T]
+ if mset == nil {
+ mset = types.NewMethodSet(T)
+ if cache.others == nil {
+ cache.others = make(map[types.Type]*types.MethodSet)
+ }
+ cache.others[T] = mset
+ }
+ return mset
+}
+
+func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } {
+ if cache.named == nil {
+ cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet })
+ }
+ // Avoid recomputing mset(*T) for each distinct Pointer
+ // instance whose underlying type is a named type.
+ msets, ok := cache.named[named]
+ if !ok {
+ msets.value = types.NewMethodSet(named)
+ msets.pointer = types.NewMethodSet(types.NewPointer(named))
+ cache.named[named] = msets
+ }
+ return msets
+}
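A minimal sketch of the optional-cache pattern that the MethodSet doc comment describes; printMethods is a hypothetical helper, not part of the package:

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

// printMethods accepts an optional cache, as the doc comment suggests
// utility functions should.
func printMethods(T types.Type, cache *typeutil.MethodSetCache) {
	mset := cache.MethodSet(T) // a nil cache falls back to types.NewMethodSet(T)
	for i := 0; i < mset.Len(); i++ {
		fmt.Println(mset.At(i))
	}
}

func main() {
	errType := types.Universe.Lookup("error").Type()

	var cache typeutil.MethodSetCache // zero value is ready to use
	printMethods(errType, &cache)     // computes and caches the method set
	printMethods(errType, &cache)     // served from the cache
	printMethods(errType, nil)        // nil cache also works
}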
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
new file mode 100644
index 0000000..9849c24
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
@@ -0,0 +1,52 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+// This file defines utilities for user interfaces that display types.
+
+import "go/types"
+
+// IntuitiveMethodSet returns the intuitive method set of a type T,
+// which is the set of methods you can call on an addressable value of
+// that type.
+//
+// The result always contains MethodSet(T), and is exactly MethodSet(T)
+// for interface types and for pointer-to-concrete types.
+// For all other concrete types T, the result additionally
+// contains each method belonging to *T if there is no identically
+// named method on T itself.
+//
+// This corresponds to user intuition about method sets;
+// this function is intended only for user interfaces.
+//
+ // The order of the result is as for types.NewMethodSet(T).
+//
+func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection {
+ isPointerToConcrete := func(T types.Type) bool {
+ ptr, ok := T.(*types.Pointer)
+ return ok && !types.IsInterface(ptr.Elem())
+ }
+
+ var result []*types.Selection
+ mset := msets.MethodSet(T)
+ if types.IsInterface(T) || isPointerToConcrete(T) {
+ for i, n := 0, mset.Len(); i < n; i++ {
+ result = append(result, mset.At(i))
+ }
+ } else {
+ // T is some other concrete type.
+ // Report methods of T and *T, preferring those of T.
+ pmset := msets.MethodSet(types.NewPointer(T))
+ for i, n := 0, pmset.Len(); i < n; i++ {
+ meth := pmset.At(i)
+ if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil {
+ meth = m
+ }
+ result = append(result, meth)
+ }
+
+ }
+ return result
+}
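A minimal sketch of calling IntuitiveMethodSet on the predeclared error type (an interface, so the result equals its ordinary method set); the test file that follows exercises the more interesting concrete-type cases:

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	errType := types.Universe.Lookup("error").Type()

	var cache typeutil.MethodSetCache
	for _, sel := range typeutil.IntuitiveMethodSet(errType, &cache) {
		fmt.Println(sel.Obj().Name()) // "Error"
	}
}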
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui_test.go b/vendor/golang.org/x/tools/go/types/typeutil/ui_test.go
new file mode 100644
index 0000000..b5064ac
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/ui_test.go
@@ -0,0 +1,61 @@
+package typeutil_test
+
+import (
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+func TestIntuitiveMethodSet(t *testing.T) {
+ const source = `
+package P
+type A int
+func (A) f()
+func (*A) g()
+`
+
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "hello.go", source, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var conf types.Config
+ pkg, err := conf.Check("P", fset, []*ast.File{f}, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ qual := types.RelativeTo(pkg)
+
+ for _, test := range []struct {
+ expr string // type expression
+ want string // intuitive method set
+ }{
+ {"A", "(A).f (*A).g"},
+ {"*A", "(*A).f (*A).g"},
+ {"error", "(error).Error"},
+ {"*error", ""},
+ {"struct{A}", "(struct{A}).f (*struct{A}).g"},
+ {"*struct{A}", "(*struct{A}).f (*struct{A}).g"},
+ } {
+ tv, err := types.Eval(fset, pkg, 0, test.expr)
+ if err != nil {
+ t.Errorf("Eval(%s) failed: %v", test.expr, err)
+ }
+ var names []string
+ for _, m := range typeutil.IntuitiveMethodSet(tv.Type, nil) {
+ name := fmt.Sprintf("(%s).%s", types.TypeString(m.Recv(), qual), m.Obj().Name())
+ names = append(names, name)
+ }
+ got := strings.Join(names, " ")
+ if got != test.want {
+ t.Errorf("IntuitiveMethodSet(%s) = %q, want %q", test.expr, got, test.want)
+ }
+ }
+}