aboutsummaryrefslogtreecommitdiff
path: root/vendor/honnef.co
diff options
context:
space:
mode:
authorJoseph Richey <joerichey@google.com>2018-02-11 21:43:56 -0800
committerGitHub <noreply@github.com>2018-02-11 21:43:56 -0800
commit8b6cbfcca9d1f3fb5b05a91b4ac0bca66f75f19e (patch)
tree5f6fdc40c7518bc23a63fdac9a92b516dd0c6d36 /vendor/honnef.co
parentaebae1aae2fc61185a8bbc96313d8462727ad202 (diff)
parentb330463662825a7d7b22efabb2b7a40640d2b18e (diff)
Merge pull request #85 from google/depfix
Complete the new Build System
Diffstat (limited to 'vendor/honnef.co')
-rw-r--r--vendor/honnef.co/go/tools/LICENSE20
-rw-r--r--vendor/honnef.co/go/tools/callgraph/callgraph.go129
-rw-r--r--vendor/honnef.co/go/tools/callgraph/static/static.go35
-rw-r--r--vendor/honnef.co/go/tools/callgraph/util.go181
-rw-r--r--vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go122
-rw-r--r--vendor/honnef.co/go/tools/deprecated/stdlib.go54
-rw-r--r--vendor/honnef.co/go/tools/functions/concrete.go56
-rw-r--r--vendor/honnef.co/go/tools/functions/functions.go150
-rw-r--r--vendor/honnef.co/go/tools/functions/loops.go50
-rw-r--r--vendor/honnef.co/go/tools/functions/pure.go123
-rw-r--r--vendor/honnef.co/go/tools/functions/terminates.go24
-rw-r--r--vendor/honnef.co/go/tools/gcsizes/LICENSE27
-rw-r--r--vendor/honnef.co/go/tools/internal/sharedcheck/lint.go70
-rw-r--r--vendor/honnef.co/go/tools/lint/LICENSE28
-rw-r--r--vendor/honnef.co/go/tools/lint/lint.go844
-rw-r--r--vendor/honnef.co/go/tools/lint/lintutil/util.go349
-rw-r--r--vendor/honnef.co/go/tools/simple/lint.go1771
-rw-r--r--vendor/honnef.co/go/tools/simple/lint17.go7
-rw-r--r--vendor/honnef.co/go/tools/simple/lint18.go7
-rw-r--r--vendor/honnef.co/go/tools/ssa/LICENSE28
-rw-r--r--vendor/honnef.co/go/tools/ssa/blockopt.go195
-rw-r--r--vendor/honnef.co/go/tools/ssa/builder.go2383
-rw-r--r--vendor/honnef.co/go/tools/ssa/const.go171
-rw-r--r--vendor/honnef.co/go/tools/ssa/const15.go171
-rw-r--r--vendor/honnef.co/go/tools/ssa/create.go263
-rw-r--r--vendor/honnef.co/go/tools/ssa/doc.go123
-rw-r--r--vendor/honnef.co/go/tools/ssa/dom.go341
-rw-r--r--vendor/honnef.co/go/tools/ssa/emit.go475
-rw-r--r--vendor/honnef.co/go/tools/ssa/func.go703
-rw-r--r--vendor/honnef.co/go/tools/ssa/identical.go7
-rw-r--r--vendor/honnef.co/go/tools/ssa/identical_17.go7
-rw-r--r--vendor/honnef.co/go/tools/ssa/lift.go608
-rw-r--r--vendor/honnef.co/go/tools/ssa/lvalue.go125
-rw-r--r--vendor/honnef.co/go/tools/ssa/methods.go241
-rw-r--r--vendor/honnef.co/go/tools/ssa/mode.go100
-rw-r--r--vendor/honnef.co/go/tools/ssa/print.go433
-rw-r--r--vendor/honnef.co/go/tools/ssa/sanity.go523
-rw-r--r--vendor/honnef.co/go/tools/ssa/source.go299
-rw-r--r--vendor/honnef.co/go/tools/ssa/ssa.go1751
-rw-r--r--vendor/honnef.co/go/tools/ssa/ssautil/load.go97
-rw-r--r--vendor/honnef.co/go/tools/ssa/ssautil/switch.go236
-rw-r--r--vendor/honnef.co/go/tools/ssa/ssautil/visit.go79
-rw-r--r--vendor/honnef.co/go/tools/ssa/testmain.go266
-rw-r--r--vendor/honnef.co/go/tools/ssa/util.go121
-rw-r--r--vendor/honnef.co/go/tools/ssa/wrappers.go296
-rw-r--r--vendor/honnef.co/go/tools/ssa/write.go5
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/buildtag.go21
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/lint.go2738
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/rules.go321
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/channel.go73
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/int.go476
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/slice.go273
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/string.go258
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go1049
-rw-r--r--vendor/honnef.co/go/tools/unused/unused.go1064
-rw-r--r--vendor/honnef.co/go/tools/version/version.go17
56 files changed, 20384 insertions, 0 deletions
diff --git a/vendor/honnef.co/go/tools/LICENSE b/vendor/honnef.co/go/tools/LICENSE
new file mode 100644
index 0000000..dfd0314
--- /dev/null
+++ b/vendor/honnef.co/go/tools/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2016 Dominik Honnef
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/honnef.co/go/tools/callgraph/callgraph.go b/vendor/honnef.co/go/tools/callgraph/callgraph.go
new file mode 100644
index 0000000..d93a20a
--- /dev/null
+++ b/vendor/honnef.co/go/tools/callgraph/callgraph.go
@@ -0,0 +1,129 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+
+Package callgraph defines the call graph and various algorithms
+and utilities to operate on it.
+
+A call graph is a labelled directed graph whose nodes represent
+functions and whose edge labels represent syntactic function call
+sites. The presence of a labelled edge (caller, site, callee)
+indicates that caller may call callee at the specified call site.
+
+A call graph is a multigraph: it may contain multiple edges (caller,
+*, callee) connecting the same pair of nodes, so long as the edges
+differ by label; this occurs when one function calls another function
+from multiple call sites. Also, it may contain multiple edges
+(caller, site, *) that differ only by callee; this indicates a
+polymorphic call.
+
+A SOUND call graph is one that overapproximates the dynamic calling
+behaviors of the program in all possible executions. One call graph
+is more PRECISE than another if it is a smaller overapproximation of
+the dynamic behavior.
+
+All call graphs have a synthetic root node which is responsible for
+calling main() and init().
+
+Calls to built-in functions (e.g. panic, println) are not represented
+in the call graph; they are treated like built-in operators of the
+language.
+
+*/
+package callgraph // import "honnef.co/go/tools/callgraph"
+
+// TODO(adonovan): add a function to eliminate wrappers from the
+// callgraph, preserving topology.
+// More generally, we could eliminate "uninteresting" nodes such as
+// nodes from packages we don't care about.
+
+import (
+ "fmt"
+ "go/token"
+
+ "honnef.co/go/tools/ssa"
+)
+
+// A Graph represents a call graph.
+//
+// A graph may contain nodes that are not reachable from the root.
+// If the call graph is sound, such nodes indicate unreachable
+// functions.
+//
+type Graph struct {
+ Root *Node // the distinguished root node
+ Nodes map[*ssa.Function]*Node // all nodes by function
+}
+
+// New returns a new Graph with the specified root node.
+func New(root *ssa.Function) *Graph {
+ g := &Graph{Nodes: make(map[*ssa.Function]*Node)}
+ g.Root = g.CreateNode(root)
+ return g
+}
+
+// CreateNode returns the Node for fn, creating it if not present.
+func (g *Graph) CreateNode(fn *ssa.Function) *Node {
+ n, ok := g.Nodes[fn]
+ if !ok {
+ n = &Node{Func: fn, ID: len(g.Nodes)}
+ g.Nodes[fn] = n
+ }
+ return n
+}
+
+// A Node represents a node in a call graph.
+type Node struct {
+ Func *ssa.Function // the function this node represents
+ ID int // 0-based sequence number
+ In []*Edge // unordered set of incoming call edges (n.In[*].Callee == n)
+ Out []*Edge // unordered set of outgoing call edges (n.Out[*].Caller == n)
+}
+
+func (n *Node) String() string {
+ return fmt.Sprintf("n%d:%s", n.ID, n.Func)
+}
+
+// A Edge represents an edge in the call graph.
+//
+// Site is nil for edges originating in synthetic or intrinsic
+// functions, e.g. reflect.Call or the root of the call graph.
+type Edge struct {
+ Caller *Node
+ Site ssa.CallInstruction
+ Callee *Node
+}
+
+func (e Edge) String() string {
+ return fmt.Sprintf("%s --> %s", e.Caller, e.Callee)
+}
+
+func (e Edge) Description() string {
+ var prefix string
+ switch e.Site.(type) {
+ case nil:
+ return "synthetic call"
+ case *ssa.Go:
+ prefix = "concurrent "
+ case *ssa.Defer:
+ prefix = "deferred "
+ }
+ return prefix + e.Site.Common().Description()
+}
+
+func (e Edge) Pos() token.Pos {
+ if e.Site == nil {
+ return token.NoPos
+ }
+ return e.Site.Pos()
+}
+
+// AddEdge adds the edge (caller, site, callee) to the call graph.
+// Elimination of duplicate edges is the caller's responsibility.
+func AddEdge(caller *Node, site ssa.CallInstruction, callee *Node) {
+ e := &Edge{caller, site, callee}
+ callee.In = append(callee.In, e)
+ caller.Out = append(caller.Out, e)
+}
diff --git a/vendor/honnef.co/go/tools/callgraph/static/static.go b/vendor/honnef.co/go/tools/callgraph/static/static.go
new file mode 100644
index 0000000..5444e84
--- /dev/null
+++ b/vendor/honnef.co/go/tools/callgraph/static/static.go
@@ -0,0 +1,35 @@
+// Package static computes the call graph of a Go program containing
+// only static call edges.
+package static // import "honnef.co/go/tools/callgraph/static"
+
+import (
+ "honnef.co/go/tools/callgraph"
+ "honnef.co/go/tools/ssa"
+ "honnef.co/go/tools/ssa/ssautil"
+)
+
+// CallGraph computes the call graph of the specified program
+// considering only static calls.
+//
+func CallGraph(prog *ssa.Program) *callgraph.Graph {
+ cg := callgraph.New(nil) // TODO(adonovan) eliminate concept of rooted callgraph
+
+ // TODO(adonovan): opt: use only a single pass over the ssa.Program.
+ // TODO(adonovan): opt: this is slower than RTA (perhaps because
+ // the lower precision means so many edges are allocated)!
+ for f := range ssautil.AllFunctions(prog) {
+ fnode := cg.CreateNode(f)
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ if site, ok := instr.(ssa.CallInstruction); ok {
+ if g := site.Common().StaticCallee(); g != nil {
+ gnode := cg.CreateNode(g)
+ callgraph.AddEdge(fnode, site, gnode)
+ }
+ }
+ }
+ }
+ }
+
+ return cg
+}
diff --git a/vendor/honnef.co/go/tools/callgraph/util.go b/vendor/honnef.co/go/tools/callgraph/util.go
new file mode 100644
index 0000000..7aeda96
--- /dev/null
+++ b/vendor/honnef.co/go/tools/callgraph/util.go
@@ -0,0 +1,181 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package callgraph
+
+import "honnef.co/go/tools/ssa"
+
+// This file provides various utilities over call graphs, such as
+// visitation and path search.
+
+// CalleesOf returns a new set containing all direct callees of the
+// caller node.
+//
+func CalleesOf(caller *Node) map[*Node]bool {
+ callees := make(map[*Node]bool)
+ for _, e := range caller.Out {
+ callees[e.Callee] = true
+ }
+ return callees
+}
+
+// GraphVisitEdges visits all the edges in graph g in depth-first order.
+// The edge function is called for each edge in postorder. If it
+// returns non-nil, visitation stops and GraphVisitEdges returns that
+// value.
+//
+func GraphVisitEdges(g *Graph, edge func(*Edge) error) error {
+ seen := make(map[*Node]bool)
+ var visit func(n *Node) error
+ visit = func(n *Node) error {
+ if !seen[n] {
+ seen[n] = true
+ for _, e := range n.Out {
+ if err := visit(e.Callee); err != nil {
+ return err
+ }
+ if err := edge(e); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+ }
+ for _, n := range g.Nodes {
+ if err := visit(n); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// PathSearch finds an arbitrary path starting at node start and
+// ending at some node for which isEnd() returns true. On success,
+// PathSearch returns the path as an ordered list of edges; on
+// failure, it returns nil.
+//
+func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge {
+ stack := make([]*Edge, 0, 32)
+ seen := make(map[*Node]bool)
+ var search func(n *Node) []*Edge
+ search = func(n *Node) []*Edge {
+ if !seen[n] {
+ seen[n] = true
+ if isEnd(n) {
+ return stack
+ }
+ for _, e := range n.Out {
+ stack = append(stack, e) // push
+ if found := search(e.Callee); found != nil {
+ return found
+ }
+ stack = stack[:len(stack)-1] // pop
+ }
+ }
+ return nil
+ }
+ return search(start)
+}
+
+// DeleteSyntheticNodes removes from call graph g all nodes for
+// synthetic functions (except g.Root and package initializers),
+// preserving the topology. In effect, calls to synthetic wrappers
+// are "inlined".
+//
+func (g *Graph) DeleteSyntheticNodes() {
+ // Measurements on the standard library and go.tools show that
+ // resulting graph has ~15% fewer nodes and 4-8% fewer edges
+ // than the input.
+ //
+ // Inlining a wrapper of in-degree m, out-degree n adds m*n
+ // and removes m+n edges. Since most wrappers are monomorphic
+ // (n=1) this results in a slight reduction. Polymorphic
+ // wrappers (n>1), e.g. from embedding an interface value
+ // inside a struct to satisfy some interface, cause an
+ // increase in the graph, but they seem to be uncommon.
+
+ // Hash all existing edges to avoid creating duplicates.
+ edges := make(map[Edge]bool)
+ for _, cgn := range g.Nodes {
+ for _, e := range cgn.Out {
+ edges[*e] = true
+ }
+ }
+ for fn, cgn := range g.Nodes {
+ if cgn == g.Root || fn.Synthetic == "" || isInit(cgn.Func) {
+ continue // keep
+ }
+ for _, eIn := range cgn.In {
+ for _, eOut := range cgn.Out {
+ newEdge := Edge{eIn.Caller, eIn.Site, eOut.Callee}
+ if edges[newEdge] {
+ continue // don't add duplicate
+ }
+ AddEdge(eIn.Caller, eIn.Site, eOut.Callee)
+ edges[newEdge] = true
+ }
+ }
+ g.DeleteNode(cgn)
+ }
+}
+
+func isInit(fn *ssa.Function) bool {
+ return fn.Pkg != nil && fn.Pkg.Func("init") == fn
+}
+
+// DeleteNode removes node n and its edges from the graph g.
+// (NB: not efficient for batch deletion.)
+func (g *Graph) DeleteNode(n *Node) {
+ n.deleteIns()
+ n.deleteOuts()
+ delete(g.Nodes, n.Func)
+}
+
+// deleteIns deletes all incoming edges to n.
+func (n *Node) deleteIns() {
+ for _, e := range n.In {
+ removeOutEdge(e)
+ }
+ n.In = nil
+}
+
+// deleteOuts deletes all outgoing edges from n.
+func (n *Node) deleteOuts() {
+ for _, e := range n.Out {
+ removeInEdge(e)
+ }
+ n.Out = nil
+}
+
+// removeOutEdge removes edge.Caller's outgoing edge 'edge'.
+func removeOutEdge(edge *Edge) {
+ caller := edge.Caller
+ n := len(caller.Out)
+ for i, e := range caller.Out {
+ if e == edge {
+ // Replace it with the final element and shrink the slice.
+ caller.Out[i] = caller.Out[n-1]
+ caller.Out[n-1] = nil // aid GC
+ caller.Out = caller.Out[:n-1]
+ return
+ }
+ }
+ panic("edge not found: " + edge.String())
+}
+
+// removeInEdge removes edge.Callee's incoming edge 'edge'.
+func removeInEdge(edge *Edge) {
+ caller := edge.Callee
+ n := len(caller.In)
+ for i, e := range caller.In {
+ if e == edge {
+ // Replace it with the final element and shrink the slice.
+ caller.In[i] = caller.In[n-1]
+ caller.In[n-1] = nil // aid GC
+ caller.In = caller.In[:n-1]
+ return
+ }
+ }
+ panic("edge not found: " + edge.String())
+}
diff --git a/vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go b/vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go
new file mode 100644
index 0000000..4c0b97c
--- /dev/null
+++ b/vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go
@@ -0,0 +1,122 @@
+// megacheck runs staticcheck, gosimple and unused.
+package main // import "honnef.co/go/tools/cmd/megacheck"
+
+import (
+ "os"
+
+ "honnef.co/go/tools/lint/lintutil"
+ "honnef.co/go/tools/simple"
+ "honnef.co/go/tools/staticcheck"
+ "honnef.co/go/tools/unused"
+)
+
+func main() {
+ var flags struct {
+ staticcheck struct {
+ enabled bool
+ generated bool
+ exitNonZero bool
+ }
+ gosimple struct {
+ enabled bool
+ generated bool
+ exitNonZero bool
+ }
+ unused struct {
+ enabled bool
+ constants bool
+ fields bool
+ functions bool
+ types bool
+ variables bool
+ debug string
+ wholeProgram bool
+ reflection bool
+ exitNonZero bool
+ }
+ }
+ fs := lintutil.FlagSet("megacheck")
+ fs.BoolVar(&flags.gosimple.enabled,
+ "simple.enabled", true, "Run gosimple")
+ fs.BoolVar(&flags.gosimple.generated,
+ "simple.generated", false, "Check generated code")
+ fs.BoolVar(&flags.gosimple.exitNonZero,
+ "simple.exit-non-zero", false, "Exit non-zero if any problems were found")
+
+ fs.BoolVar(&flags.staticcheck.enabled,
+ "staticcheck.enabled", true, "Run staticcheck")
+ fs.BoolVar(&flags.staticcheck.generated,
+ "staticcheck.generated", false, "Check generated code (only applies to a subset of checks)")
+ fs.BoolVar(&flags.staticcheck.exitNonZero,
+ "staticcheck.exit-non-zero", true, "Exit non-zero if any problems were found")
+
+ fs.BoolVar(&flags.unused.enabled,
+ "unused.enabled", true, "Run unused")
+ fs.BoolVar(&flags.unused.constants,
+ "unused.consts", true, "Report unused constants")
+ fs.BoolVar(&flags.unused.fields,
+ "unused.fields", true, "Report unused fields")
+ fs.BoolVar(&flags.unused.functions,
+ "unused.funcs", true, "Report unused functions and methods")
+ fs.BoolVar(&flags.unused.types,
+ "unused.types", true, "Report unused types")
+ fs.BoolVar(&flags.unused.variables,
+ "unused.vars", true, "Report unused variables")
+ fs.BoolVar(&flags.unused.wholeProgram,
+ "unused.exported", false, "Treat arguments as a program and report unused exported identifiers")
+ fs.BoolVar(&flags.unused.reflection,
+ "unused.reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection")
+ fs.BoolVar(&flags.unused.exitNonZero,
+ "unused.exit-non-zero", true, "Exit non-zero if any problems were found")
+
+ fs.Parse(os.Args[1:])
+
+ var checkers []lintutil.CheckerConfig
+
+ if flags.staticcheck.enabled {
+ sac := staticcheck.NewChecker()
+ sac.CheckGenerated = flags.staticcheck.generated
+ checkers = append(checkers, lintutil.CheckerConfig{
+ Checker: sac,
+ ExitNonZero: flags.staticcheck.exitNonZero,
+ })
+ }
+
+ if flags.gosimple.enabled {
+ sc := simple.NewChecker()
+ sc.CheckGenerated = flags.gosimple.generated
+ checkers = append(checkers, lintutil.CheckerConfig{
+ Checker: sc,
+ ExitNonZero: flags.gosimple.exitNonZero,
+ })
+ }
+
+ if flags.unused.enabled {
+ var mode unused.CheckMode
+ if flags.unused.constants {
+ mode |= unused.CheckConstants
+ }
+ if flags.unused.fields {
+ mode |= unused.CheckFields
+ }
+ if flags.unused.functions {
+ mode |= unused.CheckFunctions
+ }
+ if flags.unused.types {
+ mode |= unused.CheckTypes
+ }
+ if flags.unused.variables {
+ mode |= unused.CheckVariables
+ }
+ uc := unused.NewChecker(mode)
+ uc.WholeProgram = flags.unused.wholeProgram
+ uc.ConsiderReflection = flags.unused.reflection
+ checkers = append(checkers, lintutil.CheckerConfig{
+ Checker: unused.NewLintChecker(uc),
+ ExitNonZero: flags.unused.exitNonZero,
+ })
+
+ }
+
+ lintutil.ProcessFlagSet(checkers, fs)
+}
diff --git a/vendor/honnef.co/go/tools/deprecated/stdlib.go b/vendor/honnef.co/go/tools/deprecated/stdlib.go
new file mode 100644
index 0000000..b6b217c
--- /dev/null
+++ b/vendor/honnef.co/go/tools/deprecated/stdlib.go
@@ -0,0 +1,54 @@
+package deprecated
+
+type Deprecation struct {
+ DeprecatedSince int
+ AlternativeAvailableSince int
+}
+
+var Stdlib = map[string]Deprecation{
+ "image/jpeg.Reader": {4, 0},
+ // FIXME(dh): AllowBinary isn't being detected as deprecated
+ // because the comment has a newline right after "Deprecated:"
+ "go/build.AllowBinary": {7, 7},
+ "(archive/zip.FileHeader).CompressedSize": {1, 1},
+ "(archive/zip.FileHeader).UncompressedSize": {1, 1},
+ "(go/doc.Package).Bugs": {1, 1},
+ "os.SEEK_SET": {7, 7},
+ "os.SEEK_CUR": {7, 7},
+ "os.SEEK_END": {7, 7},
+ "(net.Dialer).Cancel": {7, 7},
+ "runtime.CPUProfile": {9, 0},
+ "compress/flate.ReadError": {6, 6},
+ "compress/flate.WriteError": {6, 6},
+ "path/filepath.HasPrefix": {0, 0},
+ "(net/http.Transport).Dial": {7, 7},
+ "(*net/http.Transport).CancelRequest": {6, 5},
+ "net/http.ErrWriteAfterFlush": {7, 0},
+ "net/http.ErrHeaderTooLong": {8, 0},
+ "net/http.ErrShortBody": {8, 0},
+ "net/http.ErrMissingContentLength": {8, 0},
+ "net/http/httputil.ErrPersistEOF": {0, 0},
+ "net/http/httputil.ErrClosed": {0, 0},
+ "net/http/httputil.ErrPipeline": {0, 0},
+ "net/http/httputil.ServerConn": {0, 0},
+ "net/http/httputil.NewServerConn": {0, 0},
+ "net/http/httputil.ClientConn": {0, 0},
+ "net/http/httputil.NewClientConn": {0, 0},
+ "net/http/httputil.NewProxyClientConn": {0, 0},
+ "(net/http.Request).Cancel": {7, 7},
+ "(text/template/parse.PipeNode).Line": {1, 1},
+ "(text/template/parse.ActionNode).Line": {1, 1},
+ "(text/template/parse.BranchNode).Line": {1, 1},
+ "(text/template/parse.TemplateNode).Line": {1, 1},
+ "database/sql/driver.ColumnConverter": {9, 9},
+ "database/sql/driver.Execer": {8, 8},
+ "database/sql/driver.Queryer": {8, 8},
+ "(database/sql/driver.Conn).Begin": {8, 8},
+ "(database/sql/driver.Stmt).Exec": {8, 8},
+ "(database/sql/driver.Stmt).Query": {8, 8},
+ "syscall.StringByteSlice": {1, 1},
+ "syscall.StringBytePtr": {1, 1},
+ "syscall.StringSlicePtr": {1, 1},
+ "syscall.StringToUTF16": {1, 1},
+ "syscall.StringToUTF16Ptr": {1, 1},
+}
diff --git a/vendor/honnef.co/go/tools/functions/concrete.go b/vendor/honnef.co/go/tools/functions/concrete.go
new file mode 100644
index 0000000..932acd0
--- /dev/null
+++ b/vendor/honnef.co/go/tools/functions/concrete.go
@@ -0,0 +1,56 @@
+package functions
+
+import (
+ "go/token"
+ "go/types"
+
+ "honnef.co/go/tools/ssa"
+)
+
+func concreteReturnTypes(fn *ssa.Function) []*types.Tuple {
+ res := fn.Signature.Results()
+ if res == nil {
+ return nil
+ }
+ ifaces := make([]bool, res.Len())
+ any := false
+ for i := 0; i < res.Len(); i++ {
+ _, ifaces[i] = res.At(i).Type().Underlying().(*types.Interface)
+ any = any || ifaces[i]
+ }
+ if !any {
+ return []*types.Tuple{res}
+ }
+ var out []*types.Tuple
+ for _, block := range fn.Blocks {
+ if len(block.Instrs) == 0 {
+ continue
+ }
+ ret, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return)
+ if !ok {
+ continue
+ }
+ vars := make([]*types.Var, res.Len())
+ for i, v := range ret.Results {
+ var typ types.Type
+ if !ifaces[i] {
+ typ = res.At(i).Type()
+ } else if mi, ok := v.(*ssa.MakeInterface); ok {
+ // TODO(dh): if mi.X is a function call that returns
+ // an interface, call concreteReturnTypes on that
+ // function (or, really, go through Descriptions,
+ // avoid infinite recursion etc, just like nil error
+ // detection)
+
+ // TODO(dh): support Phi nodes
+ typ = mi.X.Type()
+ } else {
+ typ = res.At(i).Type()
+ }
+ vars[i] = types.NewParam(token.NoPos, nil, "", typ)
+ }
+ out = append(out, types.NewTuple(vars...))
+ }
+ // TODO(dh): deduplicate out
+ return out
+}
diff --git a/vendor/honnef.co/go/tools/functions/functions.go b/vendor/honnef.co/go/tools/functions/functions.go
new file mode 100644
index 0000000..c5fe2d7
--- /dev/null
+++ b/vendor/honnef.co/go/tools/functions/functions.go
@@ -0,0 +1,150 @@
+package functions
+
+import (
+ "go/types"
+ "sync"
+
+ "honnef.co/go/tools/callgraph"
+ "honnef.co/go/tools/callgraph/static"
+ "honnef.co/go/tools/ssa"
+ "honnef.co/go/tools/staticcheck/vrp"
+)
+
+var stdlibDescs = map[string]Description{
+ "errors.New": Description{Pure: true},
+
+ "fmt.Errorf": Description{Pure: true},
+ "fmt.Sprintf": Description{Pure: true},
+ "fmt.Sprint": Description{Pure: true},
+
+ "sort.Reverse": Description{Pure: true},
+
+ "strings.Map": Description{Pure: true},
+ "strings.Repeat": Description{Pure: true},
+ "strings.Replace": Description{Pure: true},
+ "strings.Title": Description{Pure: true},
+ "strings.ToLower": Description{Pure: true},
+ "strings.ToLowerSpecial": Description{Pure: true},
+ "strings.ToTitle": Description{Pure: true},
+ "strings.ToTitleSpecial": Description{Pure: true},
+ "strings.ToUpper": Description{Pure: true},
+ "strings.ToUpperSpecial": Description{Pure: true},
+ "strings.Trim": Description{Pure: true},
+ "strings.TrimFunc": Description{Pure: true},
+ "strings.TrimLeft": Description{Pure: true},
+ "strings.TrimLeftFunc": Description{Pure: true},
+ "strings.TrimPrefix": Description{Pure: true},
+ "strings.TrimRight": Description{Pure: true},
+ "strings.TrimRightFunc": Description{Pure: true},
+ "strings.TrimSpace": Description{Pure: true},
+ "strings.TrimSuffix": Description{Pure: true},
+
+ "(*net/http.Request).WithContext": Description{Pure: true},
+
+ "math/rand.Read": Description{NilError: true},
+ "(*math/rand.Rand).Read": Description{NilError: true},
+}
+
+type Description struct {
+ // The function is known to be pure
+ Pure bool
+ // The function is known to be a stub
+ Stub bool
+ // The function is known to never return (panics notwithstanding)
+ Infinite bool
+ // Variable ranges
+ Ranges vrp.Ranges
+ Loops []Loop
+ // Function returns an error as its last argument, but it is
+ // always nil
+ NilError bool
+ ConcreteReturnTypes []*types.Tuple
+}
+
+type descriptionEntry struct {
+ ready chan struct{}
+ result Description
+}
+
+type Descriptions struct {
+ CallGraph *callgraph.Graph
+ mu sync.Mutex
+ cache map[*ssa.Function]*descriptionEntry
+}
+
+func NewDescriptions(prog *ssa.Program) *Descriptions {
+ return &Descriptions{
+ CallGraph: static.CallGraph(prog),
+ cache: map[*ssa.Function]*descriptionEntry{},
+ }
+}
+
+func (d *Descriptions) Get(fn *ssa.Function) Description {
+ d.mu.Lock()
+ fd := d.cache[fn]
+ if fd == nil {
+ fd = &descriptionEntry{
+ ready: make(chan struct{}),
+ }
+ d.cache[fn] = fd
+ d.mu.Unlock()
+
+ {
+ fd.result = stdlibDescs[fn.RelString(nil)]
+ fd.result.Pure = fd.result.Pure || d.IsPure(fn)
+ fd.result.Stub = fd.result.Stub || d.IsStub(fn)
+ fd.result.Infinite = fd.result.Infinite || !terminates(fn)
+ fd.result.Ranges = vrp.BuildGraph(fn).Solve()
+ fd.result.Loops = findLoops(fn)
+ fd.result.NilError = fd.result.NilError || IsNilError(fn)
+ fd.result.ConcreteReturnTypes = concreteReturnTypes(fn)
+ }
+
+ close(fd.ready)
+ } else {
+ d.mu.Unlock()
+ <-fd.ready
+ }
+ return fd.result
+}
+
+func IsNilError(fn *ssa.Function) bool {
+ // TODO(dh): This is very simplistic, as we only look for constant
+ // nil returns. A more advanced approach would work transitively.
+ // An even more advanced approach would be context-aware and
+ // determine nil errors based on inputs (e.g. io.WriteString to a
+ // bytes.Buffer will always return nil, but an io.WriteString to
+ // an os.File might not). Similarly, an os.File opened for reading
+ // won't error on Close, but other files will.
+ res := fn.Signature.Results()
+ if res.Len() == 0 {
+ return false
+ }
+ last := res.At(res.Len() - 1)
+ if types.TypeString(last.Type(), nil) != "error" {
+ return false
+ }
+
+ if fn.Blocks == nil {
+ return false
+ }
+ for _, block := range fn.Blocks {
+ if len(block.Instrs) == 0 {
+ continue
+ }
+ ins := block.Instrs[len(block.Instrs)-1]
+ ret, ok := ins.(*ssa.Return)
+ if !ok {
+ continue
+ }
+ v := ret.Results[len(ret.Results)-1]
+ c, ok := v.(*ssa.Const)
+ if !ok {
+ return false
+ }
+ if !c.IsNil() {
+ return false
+ }
+ }
+ return true
+}
diff --git a/vendor/honnef.co/go/tools/functions/loops.go b/vendor/honnef.co/go/tools/functions/loops.go
new file mode 100644
index 0000000..63011cf
--- /dev/null
+++ b/vendor/honnef.co/go/tools/functions/loops.go
@@ -0,0 +1,50 @@
+package functions
+
+import "honnef.co/go/tools/ssa"
+
+type Loop map[*ssa.BasicBlock]bool
+
+func findLoops(fn *ssa.Function) []Loop {
+ if fn.Blocks == nil {
+ return nil
+ }
+ tree := fn.DomPreorder()
+ var sets []Loop
+ for _, h := range tree {
+ for _, n := range h.Preds {
+ if !h.Dominates(n) {
+ continue
+ }
+ // n is a back-edge to h
+ // h is the loop header
+ if n == h {
+ sets = append(sets, Loop{n: true})
+ continue
+ }
+ set := Loop{h: true, n: true}
+ for _, b := range allPredsBut(n, h, nil) {
+ set[b] = true
+ }
+ sets = append(sets, set)
+ }
+ }
+ return sets
+}
+
+func allPredsBut(b, but *ssa.BasicBlock, list []*ssa.BasicBlock) []*ssa.BasicBlock {
+outer:
+ for _, pred := range b.Preds {
+ if pred == but {
+ continue
+ }
+ for _, p := range list {
+ // TODO improve big-o complexity of this function
+ if pred == p {
+ continue outer
+ }
+ }
+ list = append(list, pred)
+ list = allPredsBut(pred, but, list)
+ }
+ return list
+}
diff --git a/vendor/honnef.co/go/tools/functions/pure.go b/vendor/honnef.co/go/tools/functions/pure.go
new file mode 100644
index 0000000..d1c4d03
--- /dev/null
+++ b/vendor/honnef.co/go/tools/functions/pure.go
@@ -0,0 +1,123 @@
+package functions
+
+import (
+ "go/token"
+ "go/types"
+
+ "honnef.co/go/tools/callgraph"
+ "honnef.co/go/tools/lint"
+ "honnef.co/go/tools/ssa"
+)
+
+// IsStub reports whether a function is a stub. A function is
+// considered a stub if it has no instructions or exactly one
+// instruction, which must be either returning only constant values or
+// a panic.
+func (d *Descriptions) IsStub(fn *ssa.Function) bool {
+ if len(fn.Blocks) == 0 {
+ return true
+ }
+ if len(fn.Blocks) > 1 {
+ return false
+ }
+ instrs := lint.FilterDebug(fn.Blocks[0].Instrs)
+ if len(instrs) != 1 {
+ return false
+ }
+
+ switch instrs[0].(type) {
+ case *ssa.Return:
+ // Since this is the only instruction, the return value must
+ // be a constant. We consider all constants as stubs, not just
+ // the zero value. This does not, unfortunately, cover zero
+ // initialised structs, as these cause additional
+ // instructions.
+ return true
+ case *ssa.Panic:
+ return true
+ default:
+ return false
+ }
+}
+
+func (d *Descriptions) IsPure(fn *ssa.Function) bool {
+ if fn.Signature.Results().Len() == 0 {
+ // A function with no return values is empty or is doing some
+ // work we cannot see (for example because of build tags);
+ // don't consider it pure.
+ return false
+ }
+
+ for _, param := range fn.Params {
+ if _, ok := param.Type().Underlying().(*types.Basic); !ok {
+ return false
+ }
+ }
+
+ if fn.Blocks == nil {
+ return false
+ }
+ checkCall := func(common *ssa.CallCommon) bool {
+ if common.IsInvoke() {
+ return false
+ }
+ builtin, ok := common.Value.(*ssa.Builtin)
+ if !ok {
+ if common.StaticCallee() != fn {
+ if common.StaticCallee() == nil {
+ return false
+ }
+ // TODO(dh): ideally, IsPure wouldn't be responsible
+ // for avoiding infinite recursion, but
+ // FunctionDescriptions would be.
+ node := d.CallGraph.CreateNode(common.StaticCallee())
+ if callgraph.PathSearch(node, func(other *callgraph.Node) bool {
+ return other.Func == fn
+ }) != nil {
+ return false
+ }
+ if !d.Get(common.StaticCallee()).Pure {
+ return false
+ }
+ }
+ } else {
+ switch builtin.Name() {
+ case "len", "cap", "make", "new":
+ default:
+ return false
+ }
+ }
+ return true
+ }
+ for _, b := range fn.Blocks {
+ for _, ins := range b.Instrs {
+ switch ins := ins.(type) {
+ case *ssa.Call:
+ if !checkCall(ins.Common()) {
+ return false
+ }
+ case *ssa.Defer:
+ if !checkCall(&ins.Call) {
+ return false
+ }
+ case *ssa.Select:
+ return false
+ case *ssa.Send:
+ return false
+ case *ssa.Go:
+ return false
+ case *ssa.Panic:
+ return false
+ case *ssa.Store:
+ return false
+ case *ssa.FieldAddr:
+ return false
+ case *ssa.UnOp:
+ if ins.Op == token.MUL || ins.Op == token.AND {
+ return false
+ }
+ }
+ }
+ }
+ return true
+}
diff --git a/vendor/honnef.co/go/tools/functions/terminates.go b/vendor/honnef.co/go/tools/functions/terminates.go
new file mode 100644
index 0000000..65f9e16
--- /dev/null
+++ b/vendor/honnef.co/go/tools/functions/terminates.go
@@ -0,0 +1,24 @@
+package functions
+
+import "honnef.co/go/tools/ssa"
+
+// terminates reports whether fn is supposed to return, that is if it
+// has at least one theoretic path that returns from the function.
+// Explicit panics do not count as terminating.
+func terminates(fn *ssa.Function) bool {
+ if fn.Blocks == nil {
+ // assuming that a function terminates is the conservative
+ // choice
+ return true
+ }
+
+ for _, block := range fn.Blocks {
+ if len(block.Instrs) == 0 {
+ continue
+ }
+ if _, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return); ok {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/honnef.co/go/tools/gcsizes/LICENSE b/vendor/honnef.co/go/tools/gcsizes/LICENSE
new file mode 100644
index 0000000..6a66aea
--- /dev/null
+++ b/vendor/honnef.co/go/tools/gcsizes/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go
new file mode 100644
index 0000000..5f62fc2
--- /dev/null
+++ b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go
@@ -0,0 +1,70 @@
+package sharedcheck
+
+import (
+ "go/ast"
+ "go/types"
+
+ "honnef.co/go/tools/lint"
+ "honnef.co/go/tools/ssa"
+)
+
+func CheckRangeStringRunes(nodeFns map[ast.Node]*ssa.Function, j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ rng, ok := node.(*ast.RangeStmt)
+ if !ok || !lint.IsBlank(rng.Key) {
+ return true
+ }
+ ssafn := nodeFns[rng]
+ if ssafn == nil {
+ return true
+ }
+ v, _ := ssafn.ValueForExpr(rng.X)
+
+ // Check that we're converting from string to []rune
+ val, _ := v.(*ssa.Convert)
+ if val == nil {
+ return true
+ }
+ Tsrc, ok := val.X.Type().(*types.Basic)
+ if !ok || Tsrc.Kind() != types.String {
+ return true
+ }
+ Tdst, ok := val.Type().(*types.Slice)
+ if !ok {
+ return true
+ }
+ TdstElem, ok := Tdst.Elem().(*types.Basic)
+ if !ok || TdstElem.Kind() != types.Int32 {
+ return true
+ }
+
+ // Check that the result of the conversion is only used to
+ // range over
+ refs := val.Referrers()
+ if refs == nil {
+ return true
+ }
+
+ // Expect two refs: one for obtaining the length of the slice,
+ // one for accessing the elements
+ if len(lint.FilterDebug(*refs)) != 2 {
+ // TODO(dh): right now, we check that only one place
+ // refers to our slice. This will miss cases such as
+ // ranging over the slice twice. Ideally, we'd ensure that
+ // the slice is only used for ranging over (without
+ // accessing the key), but that is harder to do because in
+ // SSA form, ranging over a slice looks like an ordinary
+ // loop with index increments and slice accesses. We'd
+ // have to look at the associated AST node to check that
+ // it's a range statement.
+ return true
+ }
+
+ j.Errorf(rng, "should range over string, not []rune(string)")
+
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
diff --git a/vendor/honnef.co/go/tools/lint/LICENSE b/vendor/honnef.co/go/tools/lint/LICENSE
new file mode 100644
index 0000000..796130a
--- /dev/null
+++ b/vendor/honnef.co/go/tools/lint/LICENSE
@@ -0,0 +1,28 @@
+Copyright (c) 2013 The Go Authors. All rights reserved.
+Copyright (c) 2016 Dominik Honnef. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/honnef.co/go/tools/lint/lint.go b/vendor/honnef.co/go/tools/lint/lint.go
new file mode 100644
index 0000000..75a5198
--- /dev/null
+++ b/vendor/honnef.co/go/tools/lint/lint.go
@@ -0,0 +1,844 @@
+// Copyright (c) 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Package lint provides the foundation for tools like gosimple.
+package lint // import "honnef.co/go/tools/lint"
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/constant"
+ "go/printer"
+ "go/token"
+ "go/types"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strings"
+ "sync"
+ "unicode"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/loader"
+ "honnef.co/go/tools/ssa"
+ "honnef.co/go/tools/ssa/ssautil"
+)
+
+type Job struct {
+ Program *Program
+
+ checker string
+ check string
+ problems []Problem
+}
+
+type Ignore interface {
+ Match(p Problem) bool
+}
+
+type LineIgnore struct {
+ File string
+ Line int
+ Checks []string
+ matched bool
+ pos token.Pos
+}
+
+func (li *LineIgnore) Match(p Problem) bool {
+ if p.Position.Filename != li.File || p.Position.Line != li.Line {
+ return false
+ }
+ for _, c := range li.Checks {
+ if m, _ := filepath.Match(c, p.Check); m {
+ li.matched = true
+ return true
+ }
+ }
+ return false
+}
+
+func (li *LineIgnore) String() string {
+ matched := "not matched"
+ if li.matched {
+ matched = "matched"
+ }
+ return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched)
+}
+
+type FileIgnore struct {
+ File string
+ Checks []string
+}
+
+func (fi *FileIgnore) Match(p Problem) bool {
+ if p.Position.Filename != fi.File {
+ return false
+ }
+ for _, c := range fi.Checks {
+ if m, _ := filepath.Match(c, p.Check); m {
+ return true
+ }
+ }
+ return false
+}
+
+type GlobIgnore struct {
+ Pattern string
+ Checks []string
+}
+
+func (gi *GlobIgnore) Match(p Problem) bool {
+ if gi.Pattern != "*" {
+ pkgpath := p.Package.Path()
+ if strings.HasSuffix(pkgpath, "_test") {
+ pkgpath = pkgpath[:len(pkgpath)-len("_test")]
+ }
+ name := filepath.Join(pkgpath, filepath.Base(p.Position.Filename))
+ if m, _ := filepath.Match(gi.Pattern, name); !m {
+ return false
+ }
+ }
+ for _, c := range gi.Checks {
+ if m, _ := filepath.Match(c, p.Check); m {
+ return true
+ }
+ }
+ return false
+}
+
+type Program struct {
+ SSA *ssa.Program
+ Prog *loader.Program
+ // TODO(dh): Rename to InitialPackages?
+ Packages []*Pkg
+ InitialFunctions []*ssa.Function
+ AllFunctions []*ssa.Function
+ Files []*ast.File
+ Info *types.Info
+ GoVersion int
+
+ tokenFileMap map[*token.File]*ast.File
+ astFileMap map[*ast.File]*Pkg
+}
+
+type Func func(*Job)
+
+// Problem represents a problem in some source code.
+type Problem struct {
+ pos token.Pos
+ Position token.Position // position in source file
+ Text string // the prose that describes the problem
+ Check string
+ Checker string
+ Package *types.Package
+ Ignored bool
+}
+
+func (p *Problem) String() string {
+ if p.Check == "" {
+ return p.Text
+ }
+ return fmt.Sprintf("%s (%s)", p.Text, p.Check)
+}
+
+type Checker interface {
+ Name() string
+ Prefix() string
+ Init(*Program)
+ Funcs() map[string]Func
+}
+
+// A Linter lints Go source code.
+type Linter struct {
+ Checker Checker
+ Ignores []Ignore
+ GoVersion int
+ ReturnIgnored bool
+
+ automaticIgnores []Ignore
+}
+
+func (l *Linter) ignore(p Problem) bool {
+ ignored := false
+ for _, ig := range l.automaticIgnores {
+ // We cannot short-circuit these, as we want to record, for
+ // each ignore, whether it matched or not.
+ if ig.Match(p) {
+ ignored = true
+ }
+ }
+ if ignored {
+ // no need to execute other ignores if we've already had a
+ // match.
+ return true
+ }
+ for _, ig := range l.Ignores {
+ // We can short-circuit here, as we aren't tracking any
+ // information.
+ if ig.Match(p) {
+ return true
+ }
+ }
+
+ return false
+}
+
+func (prog *Program) File(node Positioner) *ast.File {
+ return prog.tokenFileMap[prog.SSA.Fset.File(node.Pos())]
+}
+
+func (j *Job) File(node Positioner) *ast.File {
+ return j.Program.File(node)
+}
+
+// TODO(dh): switch to sort.Slice when Go 1.9 lands.
+type byPosition struct {
+ fset *token.FileSet
+ ps []Problem
+}
+
+func (ps byPosition) Len() int {
+ return len(ps.ps)
+}
+
+func (ps byPosition) Less(i int, j int) bool {
+ pi, pj := ps.ps[i].Position, ps.ps[j].Position
+
+ if pi.Filename != pj.Filename {
+ return pi.Filename < pj.Filename
+ }
+ if pi.Line != pj.Line {
+ return pi.Line < pj.Line
+ }
+ if pi.Column != pj.Column {
+ return pi.Column < pj.Column
+ }
+
+ return ps.ps[i].Text < ps.ps[j].Text
+}
+
+func (ps byPosition) Swap(i int, j int) {
+ ps.ps[i], ps.ps[j] = ps.ps[j], ps.ps[i]
+}
+
+func parseDirective(s string) (cmd string, args []string) {
+ if !strings.HasPrefix(s, "//lint:") {
+ return "", nil
+ }
+ s = strings.TrimPrefix(s, "//lint:")
+ fields := strings.Split(s, " ")
+ return fields[0], fields[1:]
+}
+
+func (l *Linter) Lint(lprog *loader.Program, conf *loader.Config) []Problem {
+ ssaprog := ssautil.CreateProgram(lprog, ssa.GlobalDebug)
+ ssaprog.Build()
+ pkgMap := map[*ssa.Package]*Pkg{}
+ var pkgs []*Pkg
+ for _, pkginfo := range lprog.InitialPackages() {
+ ssapkg := ssaprog.Package(pkginfo.Pkg)
+ var bp *build.Package
+ if len(pkginfo.Files) != 0 {
+ path := lprog.Fset.Position(pkginfo.Files[0].Pos()).Filename
+ dir := filepath.Dir(path)
+ var err error
+ ctx := conf.Build
+ if ctx == nil {
+ ctx = &build.Default
+ }
+ bp, err = ctx.ImportDir(dir, 0)
+ if err != nil {
+ // shouldn't happen
+ }
+ }
+ pkg := &Pkg{
+ Package: ssapkg,
+ Info: pkginfo,
+ BuildPkg: bp,
+ }
+ pkgMap[ssapkg] = pkg
+ pkgs = append(pkgs, pkg)
+ }
+ prog := &Program{
+ SSA: ssaprog,
+ Prog: lprog,
+ Packages: pkgs,
+ Info: &types.Info{},
+ GoVersion: l.GoVersion,
+ tokenFileMap: map[*token.File]*ast.File{},
+ astFileMap: map[*ast.File]*Pkg{},
+ }
+
+ initial := map[*types.Package]struct{}{}
+ for _, pkg := range pkgs {
+ initial[pkg.Info.Pkg] = struct{}{}
+ }
+ for fn := range ssautil.AllFunctions(ssaprog) {
+ if fn.Pkg == nil {
+ continue
+ }
+ prog.AllFunctions = append(prog.AllFunctions, fn)
+ if _, ok := initial[fn.Pkg.Pkg]; ok {
+ prog.InitialFunctions = append(prog.InitialFunctions, fn)
+ }
+ }
+ for _, pkg := range pkgs {
+ prog.Files = append(prog.Files, pkg.Info.Files...)
+
+ ssapkg := ssaprog.Package(pkg.Info.Pkg)
+ for _, f := range pkg.Info.Files {
+ prog.astFileMap[f] = pkgMap[ssapkg]
+ }
+ }
+
+ for _, pkginfo := range lprog.AllPackages {
+ for _, f := range pkginfo.Files {
+ tf := lprog.Fset.File(f.Pos())
+ prog.tokenFileMap[tf] = f
+ }
+ }
+
+ var out []Problem
+ l.automaticIgnores = nil
+ for _, pkginfo := range lprog.InitialPackages() {
+ for _, f := range pkginfo.Files {
+ cm := ast.NewCommentMap(lprog.Fset, f, f.Comments)
+ for node, cgs := range cm {
+ for _, cg := range cgs {
+ for _, c := range cg.List {
+ if !strings.HasPrefix(c.Text, "//lint:") {
+ continue
+ }
+ cmd, args := parseDirective(c.Text)
+ switch cmd {
+ case "ignore", "file-ignore":
+ if len(args) < 2 {
+ // FIXME(dh): this causes duplicated warnings when using megacheck
+ p := Problem{
+ pos: c.Pos(),
+ Position: prog.DisplayPosition(c.Pos()),
+ Text: "malformed linter directive; missing the required reason field?",
+ Check: "",
+ Checker: l.Checker.Name(),
+ Package: nil,
+ }
+ out = append(out, p)
+ continue
+ }
+ default:
+ // unknown directive, ignore
+ continue
+ }
+ checks := strings.Split(args[0], ",")
+ pos := prog.DisplayPosition(node.Pos())
+ var ig Ignore
+ switch cmd {
+ case "ignore":
+ ig = &LineIgnore{
+ File: pos.Filename,
+ Line: pos.Line,
+ Checks: checks,
+ pos: c.Pos(),
+ }
+ case "file-ignore":
+ ig = &FileIgnore{
+ File: pos.Filename,
+ Checks: checks,
+ }
+ }
+ l.automaticIgnores = append(l.automaticIgnores, ig)
+ }
+ }
+ }
+ }
+ }
+
+ sizes := struct {
+ types int
+ defs int
+ uses int
+ implicits int
+ selections int
+ scopes int
+ }{}
+ for _, pkg := range pkgs {
+ sizes.types += len(pkg.Info.Info.Types)
+ sizes.defs += len(pkg.Info.Info.Defs)
+ sizes.uses += len(pkg.Info.Info.Uses)
+ sizes.implicits += len(pkg.Info.Info.Implicits)
+ sizes.selections += len(pkg.Info.Info.Selections)
+ sizes.scopes += len(pkg.Info.Info.Scopes)
+ }
+ prog.Info.Types = make(map[ast.Expr]types.TypeAndValue, sizes.types)
+ prog.Info.Defs = make(map[*ast.Ident]types.Object, sizes.defs)
+ prog.Info.Uses = make(map[*ast.Ident]types.Object, sizes.uses)
+ prog.Info.Implicits = make(map[ast.Node]types.Object, sizes.implicits)
+ prog.Info.Selections = make(map[*ast.SelectorExpr]*types.Selection, sizes.selections)
+ prog.Info.Scopes = make(map[ast.Node]*types.Scope, sizes.scopes)
+ for _, pkg := range pkgs {
+ for k, v := range pkg.Info.Info.Types {
+ prog.Info.Types[k] = v
+ }
+ for k, v := range pkg.Info.Info.Defs {
+ prog.Info.Defs[k] = v
+ }
+ for k, v := range pkg.Info.Info.Uses {
+ prog.Info.Uses[k] = v
+ }
+ for k, v := range pkg.Info.Info.Implicits {
+ prog.Info.Implicits[k] = v
+ }
+ for k, v := range pkg.Info.Info.Selections {
+ prog.Info.Selections[k] = v
+ }
+ for k, v := range pkg.Info.Info.Scopes {
+ prog.Info.Scopes[k] = v
+ }
+ }
+ l.Checker.Init(prog)
+
+ funcs := l.Checker.Funcs()
+ var keys []string
+ for k := range funcs {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+
+ var jobs []*Job
+ for _, k := range keys {
+ j := &Job{
+ Program: prog,
+ checker: l.Checker.Name(),
+ check: k,
+ }
+ jobs = append(jobs, j)
+ }
+ wg := &sync.WaitGroup{}
+ for _, j := range jobs {
+ wg.Add(1)
+ go func(j *Job) {
+ defer wg.Done()
+ fn := funcs[j.check]
+ if fn == nil {
+ return
+ }
+ fn(j)
+ }(j)
+ }
+ wg.Wait()
+
+ for _, j := range jobs {
+ for _, p := range j.problems {
+ p.Ignored = l.ignore(p)
+ if l.ReturnIgnored || !p.Ignored {
+ out = append(out, p)
+ }
+ }
+ }
+
+ for _, ig := range l.automaticIgnores {
+ ig, ok := ig.(*LineIgnore)
+ if !ok {
+ continue
+ }
+ if ig.matched {
+ continue
+ }
+ for _, c := range ig.Checks {
+ idx := strings.IndexFunc(c, func(r rune) bool {
+ return unicode.IsNumber(r)
+ })
+ if idx == -1 {
+ // malformed check name, backing out
+ continue
+ }
+ if c[:idx] != l.Checker.Prefix() {
+ // not for this checker
+ continue
+ }
+ p := Problem{
+ pos: ig.pos,
+ Position: prog.DisplayPosition(ig.pos),
+ Text: "this linter directive didn't match anything; should it be removed?",
+ Check: "",
+ Checker: l.Checker.Name(),
+ Package: nil,
+ }
+ out = append(out, p)
+ }
+ }
+
+ sort.Sort(byPosition{lprog.Fset, out})
+ return out
+}
+
+// Pkg represents a package being linted.
+type Pkg struct {
+ *ssa.Package
+ Info *loader.PackageInfo
+ BuildPkg *build.Package
+}
+
+type packager interface {
+ Package() *ssa.Package
+}
+
+func IsExample(fn *ssa.Function) bool {
+ if !strings.HasPrefix(fn.Name(), "Example") {
+ return false
+ }
+ f := fn.Prog.Fset.File(fn.Pos())
+ if f == nil {
+ return false
+ }
+ return strings.HasSuffix(f.Name(), "_test.go")
+}
+
+func (j *Job) IsInTest(node Positioner) bool {
+ f := j.Program.SSA.Fset.File(node.Pos())
+ return f != nil && strings.HasSuffix(f.Name(), "_test.go")
+}
+
+func (j *Job) IsInMain(node Positioner) bool {
+ if node, ok := node.(packager); ok {
+ return node.Package().Pkg.Name() == "main"
+ }
+ pkg := j.NodePackage(node)
+ if pkg == nil {
+ return false
+ }
+ return pkg.Pkg.Name() == "main"
+}
+
+type Positioner interface {
+ Pos() token.Pos
+}
+
+func (prog *Program) DisplayPosition(p token.Pos) token.Position {
+ // The //line compiler directive can be used to change the file
+ // name and line numbers associated with code. This can, for
+ // example, be used by code generation tools. The most prominent
+ // example is 'go tool cgo', which uses //line directives to refer
+ // back to the original source code.
+ //
+ // In the context of our linters, we need to treat these
+ // directives differently depending on context. For cgo files, we
+ // want to honour the directives, so that line numbers are
+ // adjusted correctly. For all other files, we want to ignore the
+ // directives, so that problems are reported at their actual
+ // position and not, for example, a yacc grammar file. This also
+ // affects the ignore mechanism, since it operates on the position
+ // information stored within problems. With this implementation, a
+ // user will ignore foo.go, not foo.y
+
+ pkg := prog.astFileMap[prog.tokenFileMap[prog.Prog.Fset.File(p)]]
+ bp := pkg.BuildPkg
+ adjPos := prog.Prog.Fset.Position(p)
+ if bp == nil {
+ // couldn't find the package for some reason (deleted? faulty
+ // file system?)
+ return adjPos
+ }
+ base := filepath.Base(adjPos.Filename)
+ for _, f := range bp.CgoFiles {
+ if f == base {
+ // this is a cgo file, use the adjusted position
+ return adjPos
+ }
+ }
+ // not a cgo file, ignore //line directives
+ return prog.Prog.Fset.PositionFor(p, false)
+}
+
+func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem {
+ tf := j.Program.SSA.Fset.File(n.Pos())
+ f := j.Program.tokenFileMap[tf]
+ pkg := j.Program.astFileMap[f].Pkg
+
+ pos := j.Program.DisplayPosition(n.Pos())
+ problem := Problem{
+ pos: n.Pos(),
+ Position: pos,
+ Text: fmt.Sprintf(format, args...),
+ Check: j.check,
+ Checker: j.checker,
+ Package: pkg,
+ }
+ j.problems = append(j.problems, problem)
+ return &j.problems[len(j.problems)-1]
+}
+
+func (j *Job) Render(x interface{}) string {
+ fset := j.Program.SSA.Fset
+ var buf bytes.Buffer
+ if err := printer.Fprint(&buf, fset, x); err != nil {
+ panic(err)
+ }
+ return buf.String()
+}
+
+func (j *Job) RenderArgs(args []ast.Expr) string {
+ var ss []string
+ for _, arg := range args {
+ ss = append(ss, j.Render(arg))
+ }
+ return strings.Join(ss, ", ")
+}
+
+func IsIdent(expr ast.Expr, ident string) bool {
+ id, ok := expr.(*ast.Ident)
+ return ok && id.Name == ident
+}
+
+// isBlank returns whether id is the blank identifier "_".
+// If id == nil, the answer is false.
+func IsBlank(id ast.Expr) bool {
+ ident, ok := id.(*ast.Ident)
+ return ok && ident.Name == "_"
+}
+
+func IsZero(expr ast.Expr) bool {
+ lit, ok := expr.(*ast.BasicLit)
+ return ok && lit.Kind == token.INT && lit.Value == "0"
+}
+
+func (j *Job) IsNil(expr ast.Expr) bool {
+ return j.Program.Info.Types[expr].IsNil()
+}
+
+func (j *Job) BoolConst(expr ast.Expr) bool {
+ val := j.Program.Info.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val()
+ return constant.BoolVal(val)
+}
+
+func (j *Job) IsBoolConst(expr ast.Expr) bool {
+ // We explicitly don't support typed bools because more often than
+ // not, custom bool types are used as binary enums and the
+ // explicit comparison is desired.
+
+ ident, ok := expr.(*ast.Ident)
+ if !ok {
+ return false
+ }
+ obj := j.Program.Info.ObjectOf(ident)
+ c, ok := obj.(*types.Const)
+ if !ok {
+ return false
+ }
+ basic, ok := c.Type().(*types.Basic)
+ if !ok {
+ return false
+ }
+ if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool {
+ return false
+ }
+ return true
+}
+
+func (j *Job) ExprToInt(expr ast.Expr) (int64, bool) {
+ tv := j.Program.Info.Types[expr]
+ if tv.Value == nil {
+ return 0, false
+ }
+ if tv.Value.Kind() != constant.Int {
+ return 0, false
+ }
+ return constant.Int64Val(tv.Value)
+}
+
+func (j *Job) ExprToString(expr ast.Expr) (string, bool) {
+ val := j.Program.Info.Types[expr].Value
+ if val == nil {
+ return "", false
+ }
+ if val.Kind() != constant.String {
+ return "", false
+ }
+ return constant.StringVal(val), true
+}
+
+func (j *Job) NodePackage(node Positioner) *Pkg {
+ f := j.File(node)
+ return j.Program.astFileMap[f]
+}
+
+func IsGenerated(f *ast.File) bool {
+ comments := f.Comments
+ if len(comments) > 0 {
+ comment := comments[0].Text()
+ return strings.Contains(comment, "Code generated by") ||
+ strings.Contains(comment, "DO NOT EDIT")
+ }
+ return false
+}
+
+func Preamble(f *ast.File) string {
+ cutoff := f.Package
+ if f.Doc != nil {
+ cutoff = f.Doc.Pos()
+ }
+ var out []string
+ for _, cmt := range f.Comments {
+ if cmt.Pos() >= cutoff {
+ break
+ }
+ out = append(out, cmt.Text())
+ }
+ return strings.Join(out, "\n")
+}
+
+func IsPointerLike(T types.Type) bool {
+ switch T := T.Underlying().(type) {
+ case *types.Interface, *types.Chan, *types.Map, *types.Pointer:
+ return true
+ case *types.Basic:
+ return T.Kind() == types.UnsafePointer
+ }
+ return false
+}
+
+func (j *Job) IsGoVersion(minor int) bool {
+ return j.Program.GoVersion >= minor
+}
+
+func (j *Job) IsCallToAST(node ast.Node, name string) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return false
+ }
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return false
+ }
+ fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func)
+ return ok && fn.FullName() == name
+}
+
+func (j *Job) IsCallToAnyAST(node ast.Node, names ...string) bool {
+ for _, name := range names {
+ if j.IsCallToAST(node, name) {
+ return true
+ }
+ }
+ return false
+}
+
+func CallName(call *ssa.CallCommon) string {
+ if call.IsInvoke() {
+ return ""
+ }
+ switch v := call.Value.(type) {
+ case *ssa.Function:
+ fn, ok := v.Object().(*types.Func)
+ if !ok {
+ return ""
+ }
+ return fn.FullName()
+ case *ssa.Builtin:
+ return v.Name()
+ }
+ return ""
+}
+
+func IsCallTo(call *ssa.CallCommon, name string) bool {
+ return CallName(call) == name
+}
+
+func FilterDebug(instr []ssa.Instruction) []ssa.Instruction {
+ var out []ssa.Instruction
+ for _, ins := range instr {
+ if _, ok := ins.(*ssa.DebugRef); !ok {
+ out = append(out, ins)
+ }
+ }
+ return out
+}
+
+func NodeFns(pkgs []*Pkg) map[ast.Node]*ssa.Function {
+ out := map[ast.Node]*ssa.Function{}
+
+ wg := &sync.WaitGroup{}
+ chNodeFns := make(chan map[ast.Node]*ssa.Function, runtime.NumCPU()*2)
+ for _, pkg := range pkgs {
+ pkg := pkg
+ wg.Add(1)
+ go func() {
+ m := map[ast.Node]*ssa.Function{}
+ for _, f := range pkg.Info.Files {
+ ast.Walk(&globalVisitor{m, pkg, f}, f)
+ }
+ chNodeFns <- m
+ wg.Done()
+ }()
+ }
+ go func() {
+ wg.Wait()
+ close(chNodeFns)
+ }()
+
+ for nodeFns := range chNodeFns {
+ for k, v := range nodeFns {
+ out[k] = v
+ }
+ }
+
+ return out
+}
+
+type globalVisitor struct {
+ m map[ast.Node]*ssa.Function
+ pkg *Pkg
+ f *ast.File
+}
+
+func (v *globalVisitor) Visit(node ast.Node) ast.Visitor {
+ switch node := node.(type) {
+ case *ast.CallExpr:
+ v.m[node] = v.pkg.Func("init")
+ return v
+ case *ast.FuncDecl, *ast.FuncLit:
+ nv := &fnVisitor{v.m, v.f, v.pkg, nil}
+ return nv.Visit(node)
+ default:
+ return v
+ }
+}
+
+type fnVisitor struct {
+ m map[ast.Node]*ssa.Function
+ f *ast.File
+ pkg *Pkg
+ ssafn *ssa.Function
+}
+
+func (v *fnVisitor) Visit(node ast.Node) ast.Visitor {
+ switch node := node.(type) {
+ case *ast.FuncDecl:
+ var ssafn *ssa.Function
+ ssafn = v.pkg.Prog.FuncValue(v.pkg.Info.ObjectOf(node.Name).(*types.Func))
+ v.m[node] = ssafn
+ if ssafn == nil {
+ return nil
+ }
+ return &fnVisitor{v.m, v.f, v.pkg, ssafn}
+ case *ast.FuncLit:
+ var ssafn *ssa.Function
+ path, _ := astutil.PathEnclosingInterval(v.f, node.Pos(), node.Pos())
+ ssafn = ssa.EnclosingFunction(v.pkg.Package, path)
+ v.m[node] = ssafn
+ if ssafn == nil {
+ return nil
+ }
+ return &fnVisitor{v.m, v.f, v.pkg, ssafn}
+ case nil:
+ return nil
+ default:
+ v.m[node] = v.ssafn
+ return v
+ }
+}
diff --git a/vendor/honnef.co/go/tools/lint/lintutil/util.go b/vendor/honnef.co/go/tools/lint/lintutil/util.go
new file mode 100644
index 0000000..0bb1426
--- /dev/null
+++ b/vendor/honnef.co/go/tools/lint/lintutil/util.go
@@ -0,0 +1,349 @@
+// Copyright (c) 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Package lintutil provides helpers for writing linter command lines.
+package lintutil // import "honnef.co/go/tools/lint/lintutil"
+
+import (
+ "encoding/json"
+ "errors"
+ "flag"
+ "fmt"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "io"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+
+ "honnef.co/go/tools/lint"
+ "honnef.co/go/tools/version"
+
+ "github.com/kisielk/gotool"
+ "golang.org/x/tools/go/loader"
+)
+
+type OutputFormatter interface {
+ Format(p lint.Problem)
+}
+
+type TextOutput struct {
+ w io.Writer
+}
+
+func (o TextOutput) Format(p lint.Problem) {
+ fmt.Fprintf(o.w, "%v: %s\n", relativePositionString(p.Position), p.String())
+}
+
+type JSONOutput struct {
+ w io.Writer
+}
+
+func (o JSONOutput) Format(p lint.Problem) {
+ type location struct {
+ File string `json:"file"`
+ Line int `json:"line"`
+ Column int `json:"column"`
+ }
+ jp := struct {
+ Checker string `json:"checker"`
+ Code string `json:"code"`
+ Severity string `json:"severity,omitempty"`
+ Location location `json:"location"`
+ Message string `json:"message"`
+ Ignored bool `json:"ignored"`
+ }{
+ p.Checker,
+ p.Check,
+ "", // TODO(dh): support severity
+ location{
+ p.Position.Filename,
+ p.Position.Line,
+ p.Position.Column,
+ },
+ p.Text,
+ p.Ignored,
+ }
+ _ = json.NewEncoder(o.w).Encode(jp)
+}
+func usage(name string, flags *flag.FlagSet) func() {
+ return func() {
+ fmt.Fprintf(os.Stderr, "Usage of %s:\n", name)
+ fmt.Fprintf(os.Stderr, "\t%s [flags] # runs on package in current directory\n", name)
+ fmt.Fprintf(os.Stderr, "\t%s [flags] packages\n", name)
+ fmt.Fprintf(os.Stderr, "\t%s [flags] directory\n", name)
+ fmt.Fprintf(os.Stderr, "\t%s [flags] files... # must be a single package\n", name)
+ fmt.Fprintf(os.Stderr, "Flags:\n")
+ flags.PrintDefaults()
+ }
+}
+
+type runner struct {
+ checker lint.Checker
+ tags []string
+ ignores []lint.Ignore
+ version int
+ returnIgnored bool
+}
+
+func resolveRelative(importPaths []string, tags []string) (goFiles bool, err error) {
+ if len(importPaths) == 0 {
+ return false, nil
+ }
+ if strings.HasSuffix(importPaths[0], ".go") {
+ // User is specifying a package in terms of .go files, don't resolve
+ return true, nil
+ }
+ wd, err := os.Getwd()
+ if err != nil {
+ return false, err
+ }
+ ctx := build.Default
+ ctx.BuildTags = tags
+ for i, path := range importPaths {
+ bpkg, err := ctx.Import(path, wd, build.FindOnly)
+ if err != nil {
+ return false, fmt.Errorf("can't load package %q: %v", path, err)
+ }
+ importPaths[i] = bpkg.ImportPath
+ }
+ return false, nil
+}
+
+func parseIgnore(s string) ([]lint.Ignore, error) {
+ var out []lint.Ignore
+ if len(s) == 0 {
+ return nil, nil
+ }
+ for _, part := range strings.Fields(s) {
+ p := strings.Split(part, ":")
+ if len(p) != 2 {
+ return nil, errors.New("malformed ignore string")
+ }
+ path := p[0]
+ checks := strings.Split(p[1], ",")
+ out = append(out, &lint.GlobIgnore{Pattern: path, Checks: checks})
+ }
+ return out, nil
+}
+
+type versionFlag int
+
+func (v *versionFlag) String() string {
+ return fmt.Sprintf("1.%d", *v)
+}
+
+func (v *versionFlag) Set(s string) error {
+ if len(s) < 3 {
+ return errors.New("invalid Go version")
+ }
+ if s[0] != '1' {
+ return errors.New("invalid Go version")
+ }
+ if s[1] != '.' {
+ return errors.New("invalid Go version")
+ }
+ i, err := strconv.Atoi(s[2:])
+ *v = versionFlag(i)
+ return err
+}
+
+func (v *versionFlag) Get() interface{} {
+ return int(*v)
+}
+
+func FlagSet(name string) *flag.FlagSet {
+ flags := flag.NewFlagSet("", flag.ExitOnError)
+ flags.Usage = usage(name, flags)
+ flags.Float64("min_confidence", 0, "Deprecated; use -ignore instead")
+ flags.String("tags", "", "List of `build tags`")
+ flags.String("ignore", "", "Space separated list of checks to ignore, in the following format: 'import/path/file.go:Check1,Check2,...' Both the import path and file name sections support globbing, e.g. 'os/exec/*_test.go'")
+ flags.Bool("tests", true, "Include tests")
+ flags.Bool("version", false, "Print version and exit")
+ flags.Bool("show-ignored", false, "Don't filter ignored problems")
+ flags.String("f", "text", "Output `format` (valid choices are 'text' and 'json')")
+
+ tags := build.Default.ReleaseTags
+ v := tags[len(tags)-1][2:]
+ version := new(versionFlag)
+ if err := version.Set(v); err != nil {
+ panic(fmt.Sprintf("internal error: %s", err))
+ }
+
+ flags.Var(version, "go", "Target Go `version` in the format '1.x'")
+ return flags
+}
+
+type CheckerConfig struct {
+ Checker lint.Checker
+ ExitNonZero bool
+}
+
+func ProcessFlagSet(confs []CheckerConfig, fs *flag.FlagSet) {
+ tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string)
+ ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string)
+ tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool)
+ goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int)
+ format := fs.Lookup("f").Value.(flag.Getter).Get().(string)
+ printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool)
+ showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool)
+
+ if printVersion {
+ version.Print()
+ os.Exit(0)
+ }
+
+ var cs []lint.Checker
+ for _, conf := range confs {
+ cs = append(cs, conf.Checker)
+ }
+ pss, err := Lint(cs, fs.Args(), &Options{
+ Tags: strings.Fields(tags),
+ LintTests: tests,
+ Ignores: ignore,
+ GoVersion: goVersion,
+ ReturnIgnored: showIgnored,
+ })
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err)
+ os.Exit(1)
+ }
+
+ var ps []lint.Problem
+ for _, p := range pss {
+ ps = append(ps, p...)
+ }
+
+ var f OutputFormatter
+ switch format {
+ case "text":
+ f = TextOutput{os.Stdout}
+ case "json":
+ f = JSONOutput{os.Stdout}
+ default:
+ fmt.Fprintf(os.Stderr, "unsupported output format %q\n", format)
+ os.Exit(2)
+ }
+
+ for _, p := range ps {
+ f.Format(p)
+ }
+ for i, p := range pss {
+ if len(p) != 0 && confs[i].ExitNonZero {
+ os.Exit(1)
+ }
+ }
+}
+
+type Options struct {
+ Tags []string
+ LintTests bool
+ Ignores string
+ GoVersion int
+ ReturnIgnored bool
+}
+
+func Lint(cs []lint.Checker, pkgs []string, opt *Options) ([][]lint.Problem, error) {
+ if opt == nil {
+ opt = &Options{}
+ }
+ ignores, err := parseIgnore(opt.Ignores)
+ if err != nil {
+ return nil, err
+ }
+ paths := gotool.ImportPaths(pkgs)
+ goFiles, err := resolveRelative(paths, opt.Tags)
+ if err != nil {
+ return nil, err
+ }
+ ctx := build.Default
+ ctx.BuildTags = opt.Tags
+ hadError := false
+ conf := &loader.Config{
+ Build: &ctx,
+ ParserMode: parser.ParseComments,
+ ImportPkgs: map[string]bool{},
+ TypeChecker: types.Config{
+ Error: func(err error) {
+ // Only print the first error found
+ if hadError {
+ return
+ }
+ hadError = true
+ fmt.Fprintln(os.Stderr, err)
+ },
+ },
+ }
+ if goFiles {
+ conf.CreateFromFilenames("adhoc", paths...)
+ } else {
+ for _, path := range paths {
+ conf.ImportPkgs[path] = opt.LintTests
+ }
+ }
+ lprog, err := conf.Load()
+ if err != nil {
+ return nil, err
+ }
+
+ var problems [][]lint.Problem
+ for _, c := range cs {
+ runner := &runner{
+ checker: c,
+ tags: opt.Tags,
+ ignores: ignores,
+ version: opt.GoVersion,
+ returnIgnored: opt.ReturnIgnored,
+ }
+ problems = append(problems, runner.lint(lprog, conf))
+ }
+ return problems, nil
+}
+
+func shortPath(path string) string {
+ cwd, err := os.Getwd()
+ if err != nil {
+ return path
+ }
+ if rel, err := filepath.Rel(cwd, path); err == nil && len(rel) < len(path) {
+ return rel
+ }
+ return path
+}
+
+func relativePositionString(pos token.Position) string {
+ s := shortPath(pos.Filename)
+ if pos.IsValid() {
+ if s != "" {
+ s += ":"
+ }
+ s += fmt.Sprintf("%d:%d", pos.Line, pos.Column)
+ }
+ if s == "" {
+ s = "-"
+ }
+ return s
+}
+
+func ProcessArgs(name string, cs []CheckerConfig, args []string) {
+ flags := FlagSet(name)
+ flags.Parse(args)
+
+ ProcessFlagSet(cs, flags)
+}
+
+func (runner *runner) lint(lprog *loader.Program, conf *loader.Config) []lint.Problem {
+ l := &lint.Linter{
+ Checker: runner.checker,
+ Ignores: runner.ignores,
+ GoVersion: runner.version,
+ ReturnIgnored: runner.returnIgnored,
+ }
+ return l.Lint(lprog, conf)
+}
diff --git a/vendor/honnef.co/go/tools/simple/lint.go b/vendor/honnef.co/go/tools/simple/lint.go
new file mode 100644
index 0000000..47ee690
--- /dev/null
+++ b/vendor/honnef.co/go/tools/simple/lint.go
@@ -0,0 +1,1771 @@
+// Package simple contains a linter for Go source code.
+package simple // import "honnef.co/go/tools/simple"
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "reflect"
+ "strings"
+
+ "honnef.co/go/tools/internal/sharedcheck"
+ "honnef.co/go/tools/lint"
+ "honnef.co/go/tools/ssa"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
// Checker implements the "gosimple" linter; its check IDs share the "S" prefix.
type Checker struct {
	// CheckGenerated, when true, also lints machine-generated files;
	// otherwise they are filtered out (see filterGenerated).
	CheckGenerated bool
	// MS caches computed method sets across checks.
	MS *typeutil.MethodSetCache

	// nodeFns maps AST nodes to their enclosing SSA function; populated by Init.
	nodeFns map[ast.Node]*ssa.Function
}
+
+func NewChecker() *Checker {
+ return &Checker{
+ MS: &typeutil.MethodSetCache{},
+ }
+}
+
// Name returns the user-visible name of this linter.
func (*Checker) Name() string { return "gosimple" }

// Prefix returns the prefix shared by all of this linter's check IDs.
func (*Checker) Prefix() string { return "S" }
+
// Init precomputes the AST-node-to-SSA-function mapping used by the checks.
func (c *Checker) Init(prog *lint.Program) {
	c.nodeFns = lint.NodeFns(prog.Packages)
}
+
// Funcs maps check IDs to their implementations. nil entries presumably
// mark retired checks whose IDs remain reserved — confirm against
// upstream staticcheck history.
func (c *Checker) Funcs() map[string]lint.Func {
	return map[string]lint.Func{
		"S1000": c.LintSingleCaseSelect,
		"S1001": c.LintLoopCopy,
		"S1002": c.LintIfBoolCmp,
		"S1003": c.LintStringsContains,
		"S1004": c.LintBytesCompare,
		"S1005": c.LintUnnecessaryBlank,
		"S1006": c.LintForTrue,
		"S1007": c.LintRegexpRaw,
		"S1008": c.LintIfReturn,
		"S1009": c.LintRedundantNilCheckWithLen,
		"S1010": c.LintSlicing,
		"S1011": c.LintLoopAppend,
		"S1012": c.LintTimeSince,
		"S1013": c.LintSimplerReturn,
		"S1014": nil,
		"S1015": nil,
		"S1016": c.LintSimplerStructConversion,
		"S1017": c.LintTrim,
		"S1018": c.LintLoopSlide,
		"S1019": c.LintMakeLenCap,
		"S1020": c.LintAssertNotNil,
		"S1021": c.LintDeclareAssign,
		"S1022": nil,
		"S1023": c.LintRedundantBreak,
		"S1024": c.LintTimeUntil,
		"S1025": c.LintRedundantSprintf,
		"S1026": nil,
		"S1027": nil,
		"S1028": c.LintErrorsNewSprintf,
		"S1029": c.LintRangeStringRunes,
		"S1030": c.LintBytesBufferConversions,
		"S1031": c.LintNilCheckAroundRange,
	}
}
+
+func (c *Checker) filterGenerated(files []*ast.File) []*ast.File {
+ if c.CheckGenerated {
+ return files
+ }
+ var out []*ast.File
+ for _, f := range files {
+ if !lint.IsGenerated(f) {
+ out = append(out, f)
+ }
+ }
+ return out
+}
+
// LintSingleCaseSelect flags select statements with exactly one case:
// a `for { select { ... } }` around a single receive should be a
// for-range over the channel, and a lone single-case select should be a
// plain channel operation.
func (c *Checker) LintSingleCaseSelect(j *lint.Job) {
	isSingleSelect := func(node ast.Node) bool {
		v, ok := node.(*ast.SelectStmt)
		if !ok {
			return false
		}
		return len(v.Body.List) == 1
	}

	// seen records selects already reported as part of a for-select so
	// the SelectStmt case below does not report them a second time.
	// This relies on ast.Inspect visiting the ForStmt before the nested
	// SelectStmt (pre-order traversal).
	seen := map[ast.Node]struct{}{}
	fn := func(node ast.Node) bool {
		switch v := node.(type) {
		case *ast.ForStmt:
			if len(v.Body.List) != 1 {
				return true
			}
			if !isSingleSelect(v.Body.List[0]) {
				return true
			}
			if _, ok := v.Body.List[0].(*ast.SelectStmt).Body.List[0].(*ast.CommClause).Comm.(*ast.SendStmt); ok {
				// Don't suggest using range for channel sends
				return true
			}
			seen[v.Body.List[0]] = struct{}{}
			j.Errorf(node, "should use for range instead of for { select {} }")
		case *ast.SelectStmt:
			if _, ok := seen[v]; ok {
				return true
			}
			if !isSingleSelect(v) {
				return true
			}
			j.Errorf(node, "should use a simple channel send/receive instead of select with a single case")
			return true
		}
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
+func (c *Checker) LintLoopCopy(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ loop, ok := node.(*ast.RangeStmt)
+ if !ok {
+ return true
+ }
+
+ if loop.Key == nil {
+ return true
+ }
+ if len(loop.Body.List) != 1 {
+ return true
+ }
+ stmt, ok := loop.Body.List[0].(*ast.AssignStmt)
+ if !ok {
+ return true
+ }
+ if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 {
+ return true
+ }
+ lhs, ok := stmt.Lhs[0].(*ast.IndexExpr)
+ if !ok {
+ return true
+ }
+ if _, ok := j.Program.Info.TypeOf(lhs.X).(*types.Slice); !ok {
+ return true
+ }
+ lidx, ok := lhs.Index.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ key, ok := loop.Key.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ if j.Program.Info.TypeOf(lhs) == nil || j.Program.Info.TypeOf(stmt.Rhs[0]) == nil {
+ return true
+ }
+ if j.Program.Info.ObjectOf(lidx) != j.Program.Info.ObjectOf(key) {
+ return true
+ }
+ if !types.Identical(j.Program.Info.TypeOf(lhs), j.Program.Info.TypeOf(stmt.Rhs[0])) {
+ return true
+ }
+ if _, ok := j.Program.Info.TypeOf(loop.X).(*types.Slice); !ok {
+ return true
+ }
+
+ if rhs, ok := stmt.Rhs[0].(*ast.IndexExpr); ok {
+ rx, ok := rhs.X.(*ast.Ident)
+ _ = rx
+ if !ok {
+ return true
+ }
+ ridx, ok := rhs.Index.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ if j.Program.Info.ObjectOf(ridx) != j.Program.Info.ObjectOf(key) {
+ return true
+ }
+ } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok {
+ value, ok := loop.Value.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ if j.Program.Info.ObjectOf(rhs) != j.Program.Info.ObjectOf(value) {
+ return true
+ }
+ } else {
+ return true
+ }
+ j.Errorf(loop, "should use copy() instead of a loop")
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
// LintIfBoolCmp flags comparisons of a boolean expression against the
// constants true/false (e.g. `x == true`), suggesting the bare
// expression, possibly negated, instead.
func (c *Checker) LintIfBoolCmp(j *lint.Job) {
	fn := func(node ast.Node) bool {
		expr, ok := node.(*ast.BinaryExpr)
		if !ok || (expr.Op != token.EQL && expr.Op != token.NEQ) {
			return true
		}
		x := j.IsBoolConst(expr.X)
		y := j.IsBoolConst(expr.Y)
		if !x && !y {
			return true
		}
		// other is the non-constant operand; val the constant's value.
		var other ast.Expr
		var val bool
		if x {
			val = j.BoolConst(expr.X)
			other = expr.Y
		} else {
			val = j.BoolConst(expr.Y)
			other = expr.X
		}
		// Only flag operands whose underlying type is the basic bool.
		basic, ok := j.Program.Info.TypeOf(other).Underlying().(*types.Basic)
		if !ok || basic.Kind() != types.Bool {
			return true
		}
		op := ""
		if (expr.Op == token.EQL && !val) || (expr.Op == token.NEQ && val) {
			op = "!"
		}
		r := op + j.Render(other)
		// Collapse a leading run of '!' to at most one: an even count
		// cancels out, an odd count leaves a single negation.
		l1 := len(r)
		r = strings.TrimLeft(r, "!")
		if (l1-len(r))%2 == 1 {
			r = "!" + r
		}
		j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r)
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
// LintBytesBufferConversions flags string(buf.Bytes()) and
// []byte(buf.String()) on a *bytes.Buffer, which should be
// buf.String() and buf.Bytes() respectively.
func (c *Checker) LintBytesBufferConversions(j *lint.Job) {
	fn := func(node ast.Node) bool {
		call, ok := node.(*ast.CallExpr)
		if !ok || len(call.Args) != 1 {
			return true
		}

		// The single argument must itself be a method call (buf.Bytes()
		// or buf.String()).
		argCall, ok := call.Args[0].(*ast.CallExpr)
		if !ok {
			return true
		}
		sel, ok := argCall.Fun.(*ast.SelectorExpr)
		if !ok {
			return true
		}

		// The outer "call" must be a conversion to string or []byte.
		typ := j.Program.Info.TypeOf(call.Fun)
		if typ == types.Universe.Lookup("string").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).Bytes") {
			j.Errorf(call, "should use %v.String() instead of %v", j.Render(sel.X), j.Render(call))
		} else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).String") {
			j.Errorf(call, "should use %v.Bytes() instead of %v", j.Render(sel.X), j.Render(call))
		}

		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
// LintStringsContains flags comparisons of strings.Index / bytes.Index
// (and the IndexRune/IndexAny variants) against -1 or 0 that are
// equivalent to a Contains / ContainsRune / ContainsAny call.
func (c *Checker) LintStringsContains(j *lint.Job) {
	// map of value to token to bool value: for each compared constant,
	// which comparison operators match, and whether the match means
	// "contains" (true) or "does not contain" (false).
	allowed := map[int64]map[token.Token]bool{
		-1: {token.GTR: true, token.NEQ: true, token.EQL: false},
		0:  {token.GEQ: true, token.LSS: false},
	}
	fn := func(node ast.Node) bool {
		expr, ok := node.(*ast.BinaryExpr)
		if !ok {
			return true
		}
		switch expr.Op {
		case token.GEQ, token.GTR, token.NEQ, token.LSS, token.EQL:
		default:
			return true
		}

		value, ok := j.ExprToInt(expr.Y)
		if !ok {
			return true
		}

		allowedOps, ok := allowed[value]
		if !ok {
			return true
		}
		b, ok := allowedOps[expr.Op]
		if !ok {
			return true
		}

		// The left operand must be a call to strings.IndexX or bytes.IndexX.
		call, ok := expr.X.(*ast.CallExpr)
		if !ok {
			return true
		}
		sel, ok := call.Fun.(*ast.SelectorExpr)
		if !ok {
			return true
		}
		pkgIdent, ok := sel.X.(*ast.Ident)
		if !ok {
			return true
		}
		funIdent := sel.Sel
		if pkgIdent.Name != "strings" && pkgIdent.Name != "bytes" {
			return true
		}
		newFunc := ""
		switch funIdent.Name {
		case "IndexRune":
			newFunc = "ContainsRune"
		case "IndexAny":
			newFunc = "ContainsAny"
		case "Index":
			newFunc = "Contains"
		default:
			return true
		}

		prefix := ""
		if !b {
			prefix = "!"
		}
		j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, j.RenderArgs(call.Args))

		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
+func (c *Checker) LintBytesCompare(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ expr, ok := node.(*ast.BinaryExpr)
+ if !ok {
+ return true
+ }
+ if expr.Op != token.NEQ && expr.Op != token.EQL {
+ return true
+ }
+ call, ok := expr.X.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if !j.IsCallToAST(call, "bytes.Compare") {
+ return true
+ }
+ value, ok := j.ExprToInt(expr.Y)
+ if !ok || value != 0 {
+ return true
+ }
+ args := j.RenderArgs(call.Args)
+ prefix := ""
+ if expr.Op == token.NEQ {
+ prefix = "!"
+ }
+ j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args)
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) LintForTrue(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ loop, ok := node.(*ast.ForStmt)
+ if !ok {
+ return true
+ }
+ if loop.Init != nil || loop.Post != nil {
+ return true
+ }
+ if !j.IsBoolConst(loop.Cond) || !j.BoolConst(loop.Cond) {
+ return true
+ }
+ j.Errorf(loop, "should use for {} instead of for true {}")
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
// LintRegexpRaw flags regexp.Compile/MustCompile calls whose pattern is
// an interpreted string literal containing doubled backslashes, which
// would read better as a raw string literal (`...`). Patterns that also
// contain real escape sequences (\n etc.) are skipped, since a raw
// string could not express them.
func (c *Checker) LintRegexpRaw(j *lint.Job) {
	fn := func(node ast.Node) bool {
		call, ok := node.(*ast.CallExpr)
		if !ok {
			return true
		}
		if !j.IsCallToAST(call, "regexp.MustCompile") &&
			!j.IsCallToAST(call, "regexp.Compile") {
			return true
		}
		sel, ok := call.Fun.(*ast.SelectorExpr)
		if !ok {
			return true
		}
		if len(call.Args) != 1 {
			// invalid function call
			return true
		}
		lit, ok := call.Args[0].(*ast.BasicLit)
		if !ok {
			// TODO(dominikh): support string concat, maybe support constants
			return true
		}
		if lit.Kind != token.STRING {
			// invalid function call
			return true
		}
		if lit.Value[0] != '"' {
			// already a raw string
			return true
		}
		val := lit.Value
		if !strings.Contains(val, `\\`) {
			return true
		}

		// Scan the literal: every backslash must be part of a doubled
		// pair. A backslash followed by anything else is a genuine
		// escape sequence, so a raw string would change the pattern.
		bs := false
		for _, c := range val {
			if !bs && c == '\\' {
				bs = true
				continue
			}
			if bs && c == '\\' {
				bs = false
				continue
			}
			if bs {
				// backslash followed by non-backslash -> escape sequence
				return true
			}
		}

		j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name)
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
// LintIfReturn flags blocks ending in
//
//	if <cond> { return <bool const> }
//	return <bool const>
//
// which can be collapsed to a single `return <expr>`.
func (c *Checker) LintIfReturn(j *lint.Job) {
	fn := func(node ast.Node) bool {
		block, ok := node.(*ast.BlockStmt)
		if !ok {
			return true
		}
		l := len(block.List)
		if l < 2 {
			return true
		}
		// n1 is the candidate if statement, n2 the trailing return.
		n1, n2 := block.List[l-2], block.List[l-1]

		if len(block.List) >= 3 {
			if _, ok := block.List[l-3].(*ast.IfStmt); ok {
				// Do not flag a series of if statements
				return true
			}
		}
		// if statement with no init, no else, a single condition
		// checking an identifier or function call and just a return
		// statement in the body, that returns a boolean constant
		ifs, ok := n1.(*ast.IfStmt)
		if !ok {
			return true
		}
		if ifs.Else != nil || ifs.Init != nil {
			return true
		}
		if len(ifs.Body.List) != 1 {
			return true
		}
		// Binary conditions are only accepted when they are plain
		// comparisons; other operators are left alone.
		if op, ok := ifs.Cond.(*ast.BinaryExpr); ok {
			switch op.Op {
			case token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ:
			default:
				return true
			}
		}
		ret1, ok := ifs.Body.List[0].(*ast.ReturnStmt)
		if !ok {
			return true
		}
		if len(ret1.Results) != 1 {
			return true
		}
		if !j.IsBoolConst(ret1.Results[0]) {
			return true
		}

		ret2, ok := n2.(*ast.ReturnStmt)
		if !ok {
			return true
		}
		if len(ret2.Results) != 1 {
			return true
		}
		if !j.IsBoolConst(ret2.Results[0]) {
			return true
		}
		j.Errorf(n1, "should use 'return <expr>' instead of 'if <expr> { return <bool> }; return <bool>'")
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
// LintRedundantNilCheckWithLen checks for the following redundant nil-checks:
//
//   if x == nil || len(x) == 0 {}
//   if x != nil && len(x) != 0 {}
//   if x != nil && len(x) == N {} (where N != 0)
//   if x != nil && len(x) > N {}
//   if x != nil && len(x) >= N {} (where N != 0)
//
// len() of a nil slice, map or channel is defined to be zero, so the
// nil check adds nothing.
func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) {
	// isConstZero reports whether expr is a constant, and whether that
	// constant is integer zero (literal or named constant).
	isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) {
		_, ok := expr.(*ast.BasicLit)
		if ok {
			return true, lint.IsZero(expr)
		}
		id, ok := expr.(*ast.Ident)
		if !ok {
			return false, false
		}
		c, ok := j.Program.Info.ObjectOf(id).(*types.Const)
		if !ok {
			return false, false
		}
		return true, c.Val().Kind() == constant.Int && c.Val().String() == "0"
	}

	fn := func(node ast.Node) bool {
		// check that expr is "x || y" or "x && y"
		expr, ok := node.(*ast.BinaryExpr)
		if !ok {
			return true
		}
		if expr.Op != token.LOR && expr.Op != token.LAND {
			return true
		}
		eqNil := expr.Op == token.LOR

		// check that x is "xx == nil" or "xx != nil"
		x, ok := expr.X.(*ast.BinaryExpr)
		if !ok {
			return true
		}
		if eqNil && x.Op != token.EQL {
			return true
		}
		if !eqNil && x.Op != token.NEQ {
			return true
		}
		xx, ok := x.X.(*ast.Ident)
		if !ok {
			return true
		}
		if !j.IsNil(x.Y) {
			return true
		}

		// check that y is "len(xx) == 0" or "len(xx) ... "
		y, ok := expr.Y.(*ast.BinaryExpr)
		if !ok {
			return true
		}
		if eqNil && y.Op != token.EQL { // must be len(xx) *==* 0
			// NOTE(review): this returns false (pruning the subtree)
			// while every other bail-out returns true — confirm this is
			// intentional.
			return false
		}
		yx, ok := y.X.(*ast.CallExpr)
		if !ok {
			return true
		}
		yxFun, ok := yx.Fun.(*ast.Ident)
		if !ok || yxFun.Name != "len" || len(yx.Args) != 1 {
			return true
		}
		yxArg, ok := yx.Args[0].(*ast.Ident)
		if !ok {
			return true
		}
		if yxArg.Name != xx.Name {
			return true
		}

		if eqNil && !lint.IsZero(y.Y) { // must be len(x) == *0*
			return true
		}

		if !eqNil {
			// For the && form, only comparisons that imply a non-empty
			// value make the nil check redundant.
			isConst, isZero := isConstZero(y.Y)
			if !isConst {
				return true
			}
			switch y.Op {
			case token.EQL:
				// avoid false positive for "xx != nil && len(xx) == 0"
				if isZero {
					return true
				}
			case token.GEQ:
				// avoid false positive for "xx != nil && len(xx) >= 0"
				if isZero {
					return true
				}
			case token.NEQ:
				// avoid false positive for "xx != nil && len(xx) != <non-zero>"
				if !isZero {
					return true
				}
			case token.GTR:
				// ok
			default:
				return true
			}
		}

		// finally check that xx type is one of array, slice, map or chan
		// this is to prevent false positive in case if xx is a pointer to an array
		var nilType string
		switch j.Program.Info.TypeOf(xx).(type) {
		case *types.Slice:
			nilType = "nil slices"
		case *types.Map:
			nilType = "nil maps"
		case *types.Chan:
			nilType = "nil channels"
		default:
			return true
		}
		j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType)
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
+func (c *Checker) LintSlicing(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ n, ok := node.(*ast.SliceExpr)
+ if !ok {
+ return true
+ }
+ if n.Max != nil {
+ return true
+ }
+ s, ok := n.X.(*ast.Ident)
+ if !ok || s.Obj == nil {
+ return true
+ }
+ call, ok := n.High.(*ast.CallExpr)
+ if !ok || len(call.Args) != 1 || call.Ellipsis.IsValid() {
+ return true
+ }
+ fun, ok := call.Fun.(*ast.Ident)
+ if !ok || fun.Name != "len" {
+ return true
+ }
+ if _, ok := j.Program.Info.ObjectOf(fun).(*types.Builtin); !ok {
+ return true
+ }
+ arg, ok := call.Args[0].(*ast.Ident)
+ if !ok || arg.Obj != s.Obj {
+ return true
+ }
+ j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]")
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
+func refersTo(info *types.Info, expr ast.Expr, ident *ast.Ident) bool {
+ found := false
+ fn := func(node ast.Node) bool {
+ ident2, ok := node.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ if info.ObjectOf(ident) == info.ObjectOf(ident2) {
+ found = true
+ return false
+ }
+ return true
+ }
+ ast.Inspect(expr, fn)
+ return found
+}
+
// LintLoopAppend flags loops of the form
//
//	for _, v := range src { dst = append(dst, v) }
//
// which can be written dst = append(dst, src...).
func (c *Checker) LintLoopAppend(j *lint.Job) {
	fn := func(node ast.Node) bool {
		loop, ok := node.(*ast.RangeStmt)
		if !ok {
			return true
		}
		if !lint.IsBlank(loop.Key) {
			return true
		}
		val, ok := loop.Value.(*ast.Ident)
		if !ok {
			return true
		}
		if len(loop.Body.List) != 1 {
			return true
		}
		stmt, ok := loop.Body.List[0].(*ast.AssignStmt)
		if !ok {
			return true
		}
		if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 {
			return true
		}
		// The destination must not mention the loop variable, or the
		// rewrite would change behavior.
		if refersTo(j.Program.Info, stmt.Lhs[0], val) {
			return true
		}
		call, ok := stmt.Rhs[0].(*ast.CallExpr)
		if !ok {
			return true
		}
		if len(call.Args) != 2 || call.Ellipsis.IsValid() {
			return true
		}
		fun, ok := call.Fun.(*ast.Ident)
		if !ok {
			return true
		}
		// The call must be the built-in append, not a shadowing name.
		obj := j.Program.Info.ObjectOf(fun)
		fn, ok := obj.(*types.Builtin)
		if !ok || fn.Name() != "append" {
			return true
		}

		src := j.Program.Info.TypeOf(loop.X)
		dst := j.Program.Info.TypeOf(call.Args[0])
		// TODO(dominikh) remove nil check once Go issue #15173 has
		// been fixed
		if src == nil {
			return true
		}
		if !types.Identical(src, dst) {
			return true
		}

		if j.Render(stmt.Lhs[0]) != j.Render(call.Args[0]) {
			return true
		}

		// The appended element must be exactly the loop value.
		el, ok := call.Args[1].(*ast.Ident)
		if !ok {
			return true
		}
		if j.Program.Info.ObjectOf(val) != j.Program.Info.ObjectOf(el) {
			return true
		}
		j.Errorf(loop, "should replace loop with %s = append(%s, %s...)",
			j.Render(stmt.Lhs[0]), j.Render(call.Args[0]), j.Render(loop.X))
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
+func (c *Checker) LintTimeSince(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return true
+ }
+ if !j.IsCallToAST(sel.X, "time.Now") {
+ return true
+ }
+ if sel.Sel.Name != "Sub" {
+ return true
+ }
+ j.Errorf(call, "should use time.Since instead of time.Now().Sub")
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) LintTimeUntil(j *lint.Job) {
+ if !j.IsGoVersion(8) {
+ return
+ }
+ fn := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if !j.IsCallToAST(call, "(time.Time).Sub") {
+ return true
+ }
+ if !j.IsCallToAST(call.Args[0], "time.Now") {
+ return true
+ }
+ j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())")
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
// LintSimplerReturn flags the pattern
//
//	if err != nil { return ..., err }
//	return ..., nil
//
// (with matching leading results) which can be collapsed into a single
// return, unless the nil check is needed to produce an untyped nil for
// an interface-typed result.
func (c *Checker) LintSimplerReturn(j *lint.Job) {
	fn1 := func(node ast.Node) bool {
		// Only look inside functions, so the declared result types are known.
		var ret *ast.FieldList
		switch x := node.(type) {
		case *ast.FuncDecl:
			ret = x.Type.Results
		case *ast.FuncLit:
			ret = x.Type.Results
		default:
			return true
		}
		if ret == nil {
			return true
		}

		fn2 := func(node ast.Node) bool {
			block, ok := node.(*ast.BlockStmt)
			if !ok {
				return true
			}
			if len(block.List) < 2 {
				return true
			}

		outer:
			for i, stmt := range block.List {
				if i == len(block.List)-1 {
					break
				}
				if i > 0 {
					// don't flag an if in a series of ifs
					if _, ok := block.List[i-1].(*ast.IfStmt); ok {
						continue
					}
				}

				// if <id1> != nil
				ifs, ok := stmt.(*ast.IfStmt)
				if !ok || len(ifs.Body.List) != 1 || ifs.Else != nil {
					continue
				}
				expr, ok := ifs.Cond.(*ast.BinaryExpr)
				if !ok || expr.Op != token.NEQ || !j.IsNil(expr.Y) {
					continue
				}
				id1, ok := expr.X.(*ast.Ident)
				if !ok {
					continue
				}

				// return ..., <id1>
				ret1, ok := ifs.Body.List[0].(*ast.ReturnStmt)
				if !ok || len(ret1.Results) == 0 {
					continue
				}
				var results1 []types.Object
				for _, res := range ret1.Results {
					ident, ok := res.(*ast.Ident)
					if !ok {
						continue outer
					}
					results1 = append(results1, j.Program.Info.ObjectOf(ident))
				}
				if results1[len(results1)-1] != j.Program.Info.ObjectOf(id1) {
					continue
				}

				// return ..., [<id1> | nil]
				ret2, ok := block.List[i+1].(*ast.ReturnStmt)
				if !ok || len(ret2.Results) == 0 {
					continue
				}
				var results2 []types.Object
				for _, res := range ret2.Results {
					ident, ok := res.(*ast.Ident)
					if !ok {
						continue outer
					}
					results2 = append(results2, j.Program.Info.ObjectOf(ident))
				}
				_, isNil := results2[len(results2)-1].(*types.Nil)
				if results2[len(results2)-1] != j.Program.Info.ObjectOf(id1) &&
					!isNil {
					continue
				}
				// All results except the last must match between the two returns.
				for i, v := range results1[:len(results1)-1] {
					if v != results2[i] {
						continue outer
					}
				}

				id1Obj := j.Program.Info.ObjectOf(id1)
				if id1Obj == nil {
					continue
				}
				_, idIface := id1Obj.Type().Underlying().(*types.Interface)
				_, retIface := j.Program.Info.TypeOf(ret.List[len(ret.List)-1].Type).Underlying().(*types.Interface)

				if retIface && !idIface {
					// When the return value is an interface, but the
					// identifier is not, an explicit check for nil is
					// required to return an untyped nil.
					continue
				}

				j.Errorf(ifs, "'if %s != nil { return %s }; return %s' can be simplified to 'return %s'",
					j.Render(expr.X), j.RenderArgs(ret1.Results),
					j.RenderArgs(ret2.Results), j.RenderArgs(ret1.Results))
			}
			return true
		}
		ast.Inspect(node, fn2)
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn1)
	}
}
+
// LintUnnecessaryBlank flags blank identifiers that can be dropped:
// `x, _ = m[k]` / `x, _ = <-ch` (fn1), `_ = <-ch` (fn2), and
// `for _ = range xs` / `for _, _ = range xs` (fn3, Go 1.4+ where the
// bare `for range` form exists).
func (c *Checker) LintUnnecessaryBlank(j *lint.Job) {
	// fn1: two-value assignment whose second (comma-ok) value is blank.
	fn1 := func(node ast.Node) {
		assign, ok := node.(*ast.AssignStmt)
		if !ok {
			return
		}
		if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 {
			return
		}
		if !lint.IsBlank(assign.Lhs[1]) {
			return
		}
		switch rhs := assign.Rhs[0].(type) {
		case *ast.IndexExpr:
			// The type-checker should make sure that it's a map, but
			// let's be safe.
			if _, ok := j.Program.Info.TypeOf(rhs.X).Underlying().(*types.Map); !ok {
				return
			}
		case *ast.UnaryExpr:
			if rhs.Op != token.ARROW {
				return
			}
		default:
			return
		}
		cp := *assign
		cp.Lhs = cp.Lhs[0:1]
		j.Errorf(assign, "should write %s instead of %s", j.Render(&cp), j.Render(assign))
	}

	// fn2: `_ = <-ch`, which is just `<-ch`.
	fn2 := func(node ast.Node) {
		stmt, ok := node.(*ast.AssignStmt)
		if !ok {
			return
		}
		if len(stmt.Lhs) != len(stmt.Rhs) {
			return
		}
		for i, lh := range stmt.Lhs {
			rh := stmt.Rhs[i]
			if !lint.IsBlank(lh) {
				continue
			}
			expr, ok := rh.(*ast.UnaryExpr)
			if !ok {
				continue
			}
			if expr.Op != token.ARROW {
				continue
			}
			j.Errorf(lh, "'_ = <-ch' can be simplified to '<-ch'")
		}
	}

	// fn3: range statements whose key (and value) are blank.
	fn3 := func(node ast.Node) {
		rs, ok := node.(*ast.RangeStmt)
		if !ok {
			return
		}
		if lint.IsBlank(rs.Key) && (rs.Value == nil || lint.IsBlank(rs.Value)) {
			j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`")
		}
	}

	fn := func(node ast.Node) bool {
		fn1(node)
		fn2(node)
		if j.IsGoVersion(4) {
			fn3(node)
		}
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
// LintSimplerStructConversion flags composite literals that copy every
// field of one struct value into a structurally identical type in the
// same package, which should be a plain type conversion T2(v).
func (c *Checker) LintSimplerStructConversion(j *lint.Job) {
	var skip ast.Node
	fn := func(node ast.Node) bool {
		// Do not suggest type conversion between pointers
		if unary, ok := node.(*ast.UnaryExpr); ok && unary.Op == token.AND {
			if lit, ok := unary.X.(*ast.CompositeLit); ok {
				skip = lit
			}
			return true
		}

		if node == skip {
			return true
		}

		lit, ok := node.(*ast.CompositeLit)
		if !ok {
			return true
		}
		typ1, _ := j.Program.Info.TypeOf(lit.Type).(*types.Named)
		if typ1 == nil {
			return true
		}
		s1, ok := typ1.Underlying().(*types.Struct)
		if !ok {
			return true
		}

		// typ2 is the named type of the source value; ident the value
		// every field must be selected from.
		var typ2 *types.Named
		var ident *ast.Ident
		getSelType := func(expr ast.Expr) (types.Type, *ast.Ident, bool) {
			sel, ok := expr.(*ast.SelectorExpr)
			if !ok {
				return nil, nil, false
			}
			ident, ok := sel.X.(*ast.Ident)
			if !ok {
				return nil, nil, false
			}
			typ := j.Program.Info.TypeOf(sel.X)
			return typ, ident, typ != nil
		}
		if len(lit.Elts) == 0 {
			return true
		}
		// Every field must be initialized explicitly.
		if s1.NumFields() != len(lit.Elts) {
			return true
		}
		for i, elt := range lit.Elts {
			var t types.Type
			var id *ast.Ident
			var ok bool
			switch elt := elt.(type) {
			case *ast.SelectorExpr:
				// Positional element: field i must come from the
				// source's field of the same name.
				t, id, ok = getSelType(elt)
				if !ok {
					return true
				}
				if i >= s1.NumFields() || s1.Field(i).Name() != elt.Sel.Name {
					return true
				}
			case *ast.KeyValueExpr:
				// Keyed element: key and selected field must agree.
				var sel *ast.SelectorExpr
				sel, ok = elt.Value.(*ast.SelectorExpr)
				if !ok {
					return true
				}

				if elt.Key.(*ast.Ident).Name != sel.Sel.Name {
					return true
				}
				t, id, ok = getSelType(elt.Value)
			}
			if !ok {
				return true
			}
			// All fields must be initialized from the same object
			if ident != nil && ident.Obj != id.Obj {
				return true
			}
			typ2, _ = t.(*types.Named)
			if typ2 == nil {
				return true
			}
			ident = id
		}

		if typ2 == nil {
			return true
		}

		if typ1.Obj().Pkg() != typ2.Obj().Pkg() {
			// Do not suggest type conversions between different
			// packages. Types in different packages might only match
			// by coincidence. Furthermore, if the dependency ever
			// adds more fields to its type, it could break the code
			// that relies on the type conversion to work.
			return true
		}

		s2, ok := typ2.Underlying().(*types.Struct)
		if !ok {
			return true
		}
		if typ1 == typ2 {
			return true
		}
		if !structsIdentical(s1, s2) {
			return true
		}
		j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal",
			ident.Name, typ2.Obj().Name(), typ1.Obj().Name())
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
+func (c *Checker) LintTrim(j *lint.Job) {
+ sameNonDynamic := func(node1, node2 ast.Node) bool {
+ if reflect.TypeOf(node1) != reflect.TypeOf(node2) {
+ return false
+ }
+
+ switch node1 := node1.(type) {
+ case *ast.Ident:
+ return node1.Obj == node2.(*ast.Ident).Obj
+ case *ast.SelectorExpr:
+ return j.Render(node1) == j.Render(node2)
+ case *ast.IndexExpr:
+ return j.Render(node1) == j.Render(node2)
+ }
+ return false
+ }
+
+ isLenOnIdent := func(fn ast.Expr, ident ast.Expr) bool {
+ call, ok := fn.(*ast.CallExpr)
+ if !ok {
+ return false
+ }
+ if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "len" {
+ return false
+ }
+ if len(call.Args) != 1 {
+ return false
+ }
+ return sameNonDynamic(call.Args[0], ident)
+ }
+
+ fn := func(node ast.Node) bool {
+ var pkg string
+ var fun string
+
+ ifstmt, ok := node.(*ast.IfStmt)
+ if !ok {
+ return true
+ }
+ if ifstmt.Init != nil {
+ return true
+ }
+ if ifstmt.Else != nil {
+ return true
+ }
+ if len(ifstmt.Body.List) != 1 {
+ return true
+ }
+ condCall, ok := ifstmt.Cond.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ call, ok := condCall.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return true
+ }
+ if lint.IsIdent(call.X, "strings") {
+ pkg = "strings"
+ } else if lint.IsIdent(call.X, "bytes") {
+ pkg = "bytes"
+ } else {
+ return true
+ }
+ if lint.IsIdent(call.Sel, "HasPrefix") {
+ fun = "HasPrefix"
+ } else if lint.IsIdent(call.Sel, "HasSuffix") {
+ fun = "HasSuffix"
+ } else {
+ return true
+ }
+
+ assign, ok := ifstmt.Body.List[0].(*ast.AssignStmt)
+ if !ok {
+ return true
+ }
+ if assign.Tok != token.ASSIGN {
+ return true
+ }
+ if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 {
+ return true
+ }
+ if !sameNonDynamic(condCall.Args[0], assign.Lhs[0]) {
+ return true
+ }
+ slice, ok := assign.Rhs[0].(*ast.SliceExpr)
+ if !ok {
+ return true
+ }
+ if slice.Slice3 {
+ return true
+ }
+ if !sameNonDynamic(slice.X, condCall.Args[0]) {
+ return true
+ }
+ var index ast.Expr
+ switch fun {
+ case "HasPrefix":
+ // TODO(dh) We could detect a High that is len(s), but another
+ // rule will already flag that, anyway.
+ if slice.High != nil {
+ return true
+ }
+ index = slice.Low
+ case "HasSuffix":
+ if slice.Low != nil {
+ n, ok := j.ExprToInt(slice.Low)
+ if !ok || n != 0 {
+ return true
+ }
+ }
+ index = slice.High
+ }
+
+ switch index := index.(type) {
+ case *ast.CallExpr:
+ if fun != "HasPrefix" {
+ return true
+ }
+ if fn, ok := index.Fun.(*ast.Ident); !ok || fn.Name != "len" {
+ return true
+ }
+ if len(index.Args) != 1 {
+ return true
+ }
+ id3 := index.Args[0]
+ switch oid3 := condCall.Args[1].(type) {
+ case *ast.BasicLit:
+ if pkg != "strings" {
+ return false
+ }
+ lit, ok := id3.(*ast.BasicLit)
+ if !ok {
+ return true
+ }
+ s1, ok1 := j.ExprToString(lit)
+ s2, ok2 := j.ExprToString(condCall.Args[1])
+ if !ok1 || !ok2 || s1 != s2 {
+ return true
+ }
+ default:
+ if !sameNonDynamic(id3, oid3) {
+ return true
+ }
+ }
+ case *ast.BasicLit, *ast.Ident:
+ if fun != "HasPrefix" {
+ return true
+ }
+ if pkg != "strings" {
+ return true
+ }
+ string, ok1 := j.ExprToString(condCall.Args[1])
+ int, ok2 := j.ExprToInt(slice.Low)
+ if !ok1 || !ok2 || int != int64(len(string)) {
+ return true
+ }
+ case *ast.BinaryExpr:
+ if fun != "HasSuffix" {
+ return true
+ }
+ if index.Op != token.SUB {
+ return true
+ }
+ if !isLenOnIdent(index.X, condCall.Args[0]) ||
+ !isLenOnIdent(index.Y, condCall.Args[1]) {
+ return true
+ }
+ default:
+ return true
+ }
+
+ var replacement string
+ switch fun {
+ case "HasPrefix":
+ replacement = "TrimPrefix"
+ case "HasSuffix":
+ replacement = "TrimSuffix"
+ }
+ j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement)
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
// LintLoopSlide flags index loops that slide the elements of a slice
// toward its beginning and suggests copy() instead (see the pattern in
// the body comment).
func (c *Checker) LintLoopSlide(j *lint.Job) {
	// TODO(dh): detect bs[i+offset] in addition to bs[offset+i]
	// TODO(dh): consider merging this function with LintLoopCopy
	// TODO(dh): detect length that is an expression, not a variable name
	// TODO(dh): support sliding to a different offset than the beginning of the slice

	fn := func(node ast.Node) bool {
		/*
			for i := 0; i < n; i++ {
				bs[i] = bs[offset+i]
			}

						↓

			copy(bs[:n], bs[offset:offset+n])
		*/

		// for i := 0; i < n; i++ — init at zero, simple increment.
		loop, ok := node.(*ast.ForStmt)
		if !ok || len(loop.Body.List) != 1 || loop.Init == nil || loop.Cond == nil || loop.Post == nil {
			return true
		}
		assign, ok := loop.Init.(*ast.AssignStmt)
		if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || !lint.IsZero(assign.Rhs[0]) {
			return true
		}
		initvar, ok := assign.Lhs[0].(*ast.Ident)
		if !ok {
			return true
		}
		post, ok := loop.Post.(*ast.IncDecStmt)
		if !ok || post.Tok != token.INC {
			return true
		}
		postvar, ok := post.X.(*ast.Ident)
		if !ok || j.Program.Info.ObjectOf(postvar) != j.Program.Info.ObjectOf(initvar) {
			return true
		}
		bin, ok := loop.Cond.(*ast.BinaryExpr)
		if !ok || bin.Op != token.LSS {
			return true
		}
		binx, ok := bin.X.(*ast.Ident)
		if !ok || j.Program.Info.ObjectOf(binx) != j.Program.Info.ObjectOf(initvar) {
			return true
		}
		biny, ok := bin.Y.(*ast.Ident)
		if !ok {
			return true
		}

		// Body: bs[i] = bs[offset+i] on the same slice variable.
		assign, ok = loop.Body.List[0].(*ast.AssignStmt)
		if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || assign.Tok != token.ASSIGN {
			return true
		}
		lhs, ok := assign.Lhs[0].(*ast.IndexExpr)
		if !ok {
			return true
		}
		rhs, ok := assign.Rhs[0].(*ast.IndexExpr)
		if !ok {
			return true
		}

		bs1, ok := lhs.X.(*ast.Ident)
		if !ok {
			return true
		}
		bs2, ok := rhs.X.(*ast.Ident)
		if !ok {
			return true
		}
		obj1 := j.Program.Info.ObjectOf(bs1)
		obj2 := j.Program.Info.ObjectOf(bs2)
		if obj1 != obj2 {
			return true
		}
		if _, ok := obj1.Type().Underlying().(*types.Slice); !ok {
			return true
		}

		index1, ok := lhs.Index.(*ast.Ident)
		if !ok || j.Program.Info.ObjectOf(index1) != j.Program.Info.ObjectOf(initvar) {
			return true
		}
		index2, ok := rhs.Index.(*ast.BinaryExpr)
		if !ok || index2.Op != token.ADD {
			return true
		}
		add1, ok := index2.X.(*ast.Ident)
		if !ok {
			return true
		}
		add2, ok := index2.Y.(*ast.Ident)
		if !ok || j.Program.Info.ObjectOf(add2) != j.Program.Info.ObjectOf(initvar) {
			return true
		}

		j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", j.Render(bs1), j.Render(biny), j.Render(bs1), j.Render(add1))
		return true
	}
	for _, f := range c.filterGenerated(j.Program.Files) {
		ast.Inspect(f, fn)
	}
}
+
+func (c *Checker) LintMakeLenCap(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "make" {
+ // FIXME check whether make is indeed the built-in function
+ return true
+ }
+ switch len(call.Args) {
+ case 2:
+ // make(T, len)
+ if _, ok := j.Program.Info.TypeOf(call.Args[0]).Underlying().(*types.Slice); ok {
+ break
+ }
+ if lint.IsZero(call.Args[1]) {
+ j.Errorf(call.Args[1], "should use make(%s) instead", j.Render(call.Args[0]))
+ }
+ case 3:
+ // make(T, len, cap)
+ if j.Render(call.Args[1]) == j.Render(call.Args[2]) {
+ j.Errorf(call.Args[1], "should use make(%s, %s) instead", j.Render(call.Args[0]), j.Render(call.Args[1]))
+ }
+ }
+ return false
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) LintAssertNotNil(j *lint.Job) {
+ isNilCheck := func(ident *ast.Ident, expr ast.Expr) bool {
+ xbinop, ok := expr.(*ast.BinaryExpr)
+ if !ok || xbinop.Op != token.NEQ {
+ return false
+ }
+ xident, ok := xbinop.X.(*ast.Ident)
+ if !ok || xident.Obj != ident.Obj {
+ return false
+ }
+ if !j.IsNil(xbinop.Y) {
+ return false
+ }
+ return true
+ }
+ isOKCheck := func(ident *ast.Ident, expr ast.Expr) bool {
+ yident, ok := expr.(*ast.Ident)
+ if !ok || yident.Obj != ident.Obj {
+ return false
+ }
+ return true
+ }
+ fn := func(node ast.Node) bool {
+ ifstmt, ok := node.(*ast.IfStmt)
+ if !ok {
+ return true
+ }
+ assign, ok := ifstmt.Init.(*ast.AssignStmt)
+ if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !lint.IsBlank(assign.Lhs[0]) {
+ return true
+ }
+ assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr)
+ if !ok {
+ return true
+ }
+ binop, ok := ifstmt.Cond.(*ast.BinaryExpr)
+ if !ok || binop.Op != token.LAND {
+ return true
+ }
+ assertIdent, ok := assert.X.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ assignIdent, ok := assign.Lhs[1].(*ast.Ident)
+ if !ok {
+ return true
+ }
+ if !(isNilCheck(assertIdent, binop.X) && isOKCheck(assignIdent, binop.Y)) &&
+ !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) {
+ return true
+ }
+ j.Errorf(ifstmt, "when %s is true, %s can't be nil", j.Render(assignIdent), j.Render(assertIdent))
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) LintDeclareAssign(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ block, ok := node.(*ast.BlockStmt)
+ if !ok {
+ return true
+ }
+ if len(block.List) < 2 {
+ return true
+ }
+ for i, stmt := range block.List[:len(block.List)-1] {
+ _ = i
+ decl, ok := stmt.(*ast.DeclStmt)
+ if !ok {
+ continue
+ }
+ gdecl, ok := decl.Decl.(*ast.GenDecl)
+ if !ok || gdecl.Tok != token.VAR || len(gdecl.Specs) != 1 {
+ continue
+ }
+ vspec, ok := gdecl.Specs[0].(*ast.ValueSpec)
+ if !ok || len(vspec.Names) != 1 || len(vspec.Values) != 0 {
+ continue
+ }
+
+ assign, ok := block.List[i+1].(*ast.AssignStmt)
+ if !ok || assign.Tok != token.ASSIGN {
+ continue
+ }
+ if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 {
+ continue
+ }
+ ident, ok := assign.Lhs[0].(*ast.Ident)
+ if !ok {
+ continue
+ }
+ if vspec.Names[0].Obj != ident.Obj {
+ continue
+ }
+
+ if refersTo(j.Program.Info, assign.Rhs[0], ident) {
+ continue
+ }
+ j.Errorf(decl, "should merge variable declaration with assignment on next line")
+ }
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) LintRedundantBreak(j *lint.Job) {
+ fn1 := func(node ast.Node) {
+ clause, ok := node.(*ast.CaseClause)
+ if !ok {
+ return
+ }
+ if len(clause.Body) < 2 {
+ return
+ }
+ branch, ok := clause.Body[len(clause.Body)-1].(*ast.BranchStmt)
+ if !ok || branch.Tok != token.BREAK || branch.Label != nil {
+ return
+ }
+ j.Errorf(branch, "redundant break statement")
+ return
+ }
+ fn2 := func(node ast.Node) {
+ var ret *ast.FieldList
+ var body *ast.BlockStmt
+ switch x := node.(type) {
+ case *ast.FuncDecl:
+ ret = x.Type.Results
+ body = x.Body
+ case *ast.FuncLit:
+ ret = x.Type.Results
+ body = x.Body
+ default:
+ return
+ }
+ // if the func has results, a return can't be redundant.
+ // similarly, if there are no statements, there can be
+ // no return.
+ if ret != nil || body == nil || len(body.List) < 1 {
+ return
+ }
+ rst, ok := body.List[len(body.List)-1].(*ast.ReturnStmt)
+ if !ok {
+ return
+ }
+ // we don't need to check rst.Results as we already
+ // checked x.Type.Results to be nil.
+ j.Errorf(rst, "redundant return statement")
+ }
+ fn := func(node ast.Node) bool {
+ fn1(node)
+ fn2(node)
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) Implements(j *lint.Job, typ types.Type, iface string) bool {
+ // OPT(dh): we can cache the type lookup
+ idx := strings.IndexRune(iface, '.')
+ var scope *types.Scope
+ var ifaceName string
+ if idx == -1 {
+ scope = types.Universe
+ ifaceName = iface
+ } else {
+ pkgName := iface[:idx]
+ pkg := j.Program.Prog.Package(pkgName)
+ if pkg == nil {
+ return false
+ }
+ scope = pkg.Pkg.Scope()
+ ifaceName = iface[idx+1:]
+ }
+
+ obj := scope.Lookup(ifaceName)
+ if obj == nil {
+ return false
+ }
+ i, ok := obj.Type().Underlying().(*types.Interface)
+ if !ok {
+ return false
+ }
+ return types.Implements(typ, i)
+}
+
+func (c *Checker) LintRedundantSprintf(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if !j.IsCallToAST(call, "fmt.Sprintf") {
+ return true
+ }
+ if len(call.Args) != 2 {
+ return true
+ }
+ if s, ok := j.ExprToString(call.Args[0]); !ok || s != "%s" {
+ return true
+ }
+ pkg := j.NodePackage(call)
+ arg := call.Args[1]
+ typ := pkg.Info.TypeOf(arg)
+
+ if c.Implements(j, typ, "fmt.Stringer") {
+ j.Errorf(call, "should use String() instead of fmt.Sprintf")
+ return true
+ }
+
+ if typ.Underlying() == types.Universe.Lookup("string").Type() {
+ if typ == types.Universe.Lookup("string").Type() {
+ j.Errorf(call, "the argument is already a string, there's no need to use fmt.Sprintf")
+ } else {
+ j.Errorf(call, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf")
+ }
+ }
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) LintErrorsNewSprintf(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ if !j.IsCallToAST(node, "errors.New") {
+ return true
+ }
+ call := node.(*ast.CallExpr)
+ if !j.IsCallToAST(call.Args[0], "fmt.Sprintf") {
+ return true
+ }
+ j.Errorf(node, "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))")
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) LintRangeStringRunes(j *lint.Job) {
+ sharedcheck.CheckRangeStringRunes(c.nodeFns, j)
+}
+
+func (c *Checker) LintNilCheckAroundRange(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ ifstmt, ok := node.(*ast.IfStmt)
+ if !ok {
+ return true
+ }
+
+ cond, ok := ifstmt.Cond.(*ast.BinaryExpr)
+ if !ok {
+ return true
+ }
+
+ if cond.Op != token.NEQ || !j.IsNil(cond.Y) || len(ifstmt.Body.List) != 1 {
+ return true
+ }
+
+ loop, ok := ifstmt.Body.List[0].(*ast.RangeStmt)
+ if !ok {
+ return true
+ }
+ ifXIdent, ok := cond.X.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ rangeXIdent, ok := loop.X.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ if ifXIdent.Obj != rangeXIdent.Obj {
+ return true
+ }
+ switch j.Program.Info.TypeOf(rangeXIdent).(type) {
+ case *types.Slice, *types.Map:
+ j.Errorf(node, "unnecessary nil check around range")
+ }
+ return true
+ }
+ for _, f := range c.filterGenerated(j.Program.Files) {
+ ast.Inspect(f, fn)
+ }
+}
diff --git a/vendor/honnef.co/go/tools/simple/lint17.go b/vendor/honnef.co/go/tools/simple/lint17.go
new file mode 100644
index 0000000..53f529c
--- /dev/null
+++ b/vendor/honnef.co/go/tools/simple/lint17.go
@@ -0,0 +1,7 @@
+// +build !go1.8
+
+package simple
+
+import "go/types"
+
+var structsIdentical = types.Identical
diff --git a/vendor/honnef.co/go/tools/simple/lint18.go b/vendor/honnef.co/go/tools/simple/lint18.go
new file mode 100644
index 0000000..ab9ea72
--- /dev/null
+++ b/vendor/honnef.co/go/tools/simple/lint18.go
@@ -0,0 +1,7 @@
+// +build go1.8
+
+package simple
+
+import "go/types"
+
+var structsIdentical = types.IdenticalIgnoreTags
diff --git a/vendor/honnef.co/go/tools/ssa/LICENSE b/vendor/honnef.co/go/tools/ssa/LICENSE
new file mode 100644
index 0000000..aee4804
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/LICENSE
@@ -0,0 +1,28 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+Copyright (c) 2016 Dominik Honnef. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/honnef.co/go/tools/ssa/blockopt.go b/vendor/honnef.co/go/tools/ssa/blockopt.go
new file mode 100644
index 0000000..22c9a4c
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/blockopt.go
@@ -0,0 +1,195 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// Simple block optimizations to simplify the control flow graph.
+
+// TODO(adonovan): opt: instead of creating several "unreachable" blocks
+// per function in the Builder, reuse a single one (e.g. at Blocks[1])
+// to reduce garbage.
+
+import (
+ "fmt"
+ "os"
+)
+
+// If true, perform sanity checking and show progress at each
+// successive iteration of optimizeBlocks. Very verbose.
+const debugBlockOpt = false
+
+// markReachable sets Index=-1 for all blocks reachable from b.
+func markReachable(b *BasicBlock) {
+ b.Index = -1
+ for _, succ := range b.Succs {
+ if succ.Index == 0 {
+ markReachable(succ)
+ }
+ }
+}
+
+func DeleteUnreachableBlocks(f *Function) {
+ deleteUnreachableBlocks(f)
+}
+
+// deleteUnreachableBlocks marks all reachable blocks of f and
+// eliminates (nils) all others, including possibly cyclic subgraphs.
+//
+func deleteUnreachableBlocks(f *Function) {
+ const white, black = 0, -1
+ // We borrow b.Index temporarily as the mark bit.
+ for _, b := range f.Blocks {
+ b.Index = white
+ }
+ markReachable(f.Blocks[0])
+ if f.Recover != nil {
+ markReachable(f.Recover)
+ }
+ for i, b := range f.Blocks {
+ if b.Index == white {
+ for _, c := range b.Succs {
+ if c.Index == black {
+ c.removePred(b) // delete white->black edge
+ }
+ }
+ if debugBlockOpt {
+ fmt.Fprintln(os.Stderr, "unreachable", b)
+ }
+ f.Blocks[i] = nil // delete b
+ }
+ }
+ f.removeNilBlocks()
+}
+
+// jumpThreading attempts to apply simple jump-threading to block b,
+// in which a->b->c become a->c if b is just a Jump.
+// The result is true if the optimization was applied.
+//
+func jumpThreading(f *Function, b *BasicBlock) bool {
+ if b.Index == 0 {
+ return false // don't apply to entry block
+ }
+ if b.Instrs == nil {
+ return false
+ }
+ if _, ok := b.Instrs[0].(*Jump); !ok {
+ return false // not just a jump
+ }
+ c := b.Succs[0]
+ if c == b {
+ return false // don't apply to degenerate jump-to-self.
+ }
+ if c.hasPhi() {
+ return false // not sound without more effort
+ }
+ for j, a := range b.Preds {
+ a.replaceSucc(b, c)
+
+ // If a now has two edges to c, replace its degenerate If by Jump.
+ if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c {
+ jump := new(Jump)
+ jump.setBlock(a)
+ a.Instrs[len(a.Instrs)-1] = jump
+ a.Succs = a.Succs[:1]
+ c.removePred(b)
+ } else {
+ if j == 0 {
+ c.replacePred(b, a)
+ } else {
+ c.Preds = append(c.Preds, a)
+ }
+ }
+
+ if debugBlockOpt {
+ fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c)
+ }
+ }
+ f.Blocks[b.Index] = nil // delete b
+ return true
+}
+
+// fuseBlocks attempts to apply the block fusion optimization to block
+// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1.
+// The result is true if the optimization was applied.
+//
+func fuseBlocks(f *Function, a *BasicBlock) bool {
+ if len(a.Succs) != 1 {
+ return false
+ }
+ b := a.Succs[0]
+ if len(b.Preds) != 1 {
+ return false
+ }
+
+ // Degenerate &&/|| ops may result in a straight-line CFG
+ // containing φ-nodes. (Ideally we'd replace such them with
+ // their sole operand but that requires Referrers, built later.)
+ if b.hasPhi() {
+ return false // not sound without further effort
+ }
+
+ // Eliminate jump at end of A, then copy all of B across.
+ a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...)
+ for _, instr := range b.Instrs {
+ instr.setBlock(a)
+ }
+
+ // A inherits B's successors
+ a.Succs = append(a.succs2[:0], b.Succs...)
+
+ // Fix up Preds links of all successors of B.
+ for _, c := range b.Succs {
+ c.replacePred(b, a)
+ }
+
+ if debugBlockOpt {
+ fmt.Fprintln(os.Stderr, "fuseBlocks", a, b)
+ }
+
+ f.Blocks[b.Index] = nil // delete b
+ return true
+}
+
+func OptimizeBlocks(f *Function) {
+ optimizeBlocks(f)
+}
+
+// optimizeBlocks() performs some simple block optimizations on a
+// completed function: dead block elimination, block fusion, jump
+// threading.
+//
+func optimizeBlocks(f *Function) {
+ deleteUnreachableBlocks(f)
+
+ // Loop until no further progress.
+ changed := true
+ for changed {
+ changed = false
+
+ if debugBlockOpt {
+ f.WriteTo(os.Stderr)
+ mustSanityCheck(f, nil)
+ }
+
+ for _, b := range f.Blocks {
+ // f.Blocks will temporarily contain nils to indicate
+ // deleted blocks; we remove them at the end.
+ if b == nil {
+ continue
+ }
+
+ // Fuse blocks. b->c becomes bc.
+ if fuseBlocks(f, b) {
+ changed = true
+ }
+
+ // a->b->c becomes a->c if b contains only a Jump.
+ if jumpThreading(f, b) {
+ changed = true
+ continue // (b was disconnected)
+ }
+ }
+ }
+ f.removeNilBlocks()
+}
diff --git a/vendor/honnef.co/go/tools/ssa/builder.go b/vendor/honnef.co/go/tools/ssa/builder.go
new file mode 100644
index 0000000..bfb7a2b
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/builder.go
@@ -0,0 +1,2383 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file implements the BUILD phase of SSA construction.
+//
+// SSA construction has two phases, CREATE and BUILD. In the CREATE phase
+// (create.go), all packages are constructed and type-checked and
+// definitions of all package members are created, method-sets are
+// computed, and wrapper methods are synthesized.
+// ssa.Packages are created in arbitrary order.
+//
+// In the BUILD phase (builder.go), the builder traverses the AST of
+// each Go source function and generates SSA instructions for the
+// function body. Initializer expressions for package-level variables
+// are emitted to the package's init() function in the order specified
+// by go/types.Info.InitOrder, then code for each function in the
+// package is generated in lexical order.
+// The BUILD phases for distinct packages are independent and are
+// executed in parallel.
+//
+// TODO(adonovan): indeed, building functions is now embarrassingly parallel.
+// Audit for concurrency then benchmark using more goroutines.
+//
+// The builder's and Program's indices (maps) are populated and
+// mutated during the CREATE phase, but during the BUILD phase they
+// remain constant. The sole exception is Prog.methodSets and its
+// related maps, which are protected by a dedicated mutex.
+
+import (
+ "fmt"
+ "go/ast"
+ exact "go/constant"
+ "go/token"
+ "go/types"
+ "os"
+ "sync"
+)
+
+type opaqueType struct {
+ types.Type
+ name string
+}
+
+func (t *opaqueType) String() string { return t.name }
+
+var (
+ varOk = newVar("ok", tBool)
+ varIndex = newVar("index", tInt)
+
+ // Type constants.
+ tBool = types.Typ[types.Bool]
+ tByte = types.Typ[types.Byte]
+ tInt = types.Typ[types.Int]
+ tInvalid = types.Typ[types.Invalid]
+ tString = types.Typ[types.String]
+ tUntypedNil = types.Typ[types.UntypedNil]
+ tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators
+ tEface = new(types.Interface)
+
+ // SSA Value constants.
+ vZero = intConst(0)
+ vOne = intConst(1)
+ vTrue = NewConst(exact.MakeBool(true), tBool)
+)
+
+// builder holds state associated with the package currently being built.
+// Its methods contain all the logic for AST-to-SSA conversion.
+type builder struct{}
+
+// cond emits to fn code to evaluate boolean condition e and jump
+// to t or f depending on its value, performing various simplifications.
+//
+// Postcondition: fn.currentBlock is nil.
+//
+func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
+ switch e := e.(type) {
+ case *ast.ParenExpr:
+ b.cond(fn, e.X, t, f)
+ return
+
+ case *ast.BinaryExpr:
+ switch e.Op {
+ case token.LAND:
+ ltrue := fn.newBasicBlock("cond.true")
+ b.cond(fn, e.X, ltrue, f)
+ fn.currentBlock = ltrue
+ b.cond(fn, e.Y, t, f)
+ return
+
+ case token.LOR:
+ lfalse := fn.newBasicBlock("cond.false")
+ b.cond(fn, e.X, t, lfalse)
+ fn.currentBlock = lfalse
+ b.cond(fn, e.Y, t, f)
+ return
+ }
+
+ case *ast.UnaryExpr:
+ if e.Op == token.NOT {
+ b.cond(fn, e.X, f, t)
+ return
+ }
+ }
+
+ // A traditional compiler would simplify "if false" (etc) here
+ // but we do not, for better fidelity to the source code.
+ //
+ // The value of a constant condition may be platform-specific,
+ // and may cause blocks that are reachable in some configuration
+ // to be hidden from subsequent analyses such as bug-finding tools.
+ emitIf(fn, b.expr(fn, e), t, f)
+}
+
+// logicalBinop emits code to fn to evaluate e, a &&- or
+// ||-expression whose reified boolean value is wanted.
+// The value is returned.
+//
+func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
+ rhs := fn.newBasicBlock("binop.rhs")
+ done := fn.newBasicBlock("binop.done")
+
+ // T(e) = T(e.X) = T(e.Y) after untyped constants have been
+ // eliminated.
+ // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
+ t := fn.Pkg.typeOf(e)
+
+ var short Value // value of the short-circuit path
+ switch e.Op {
+ case token.LAND:
+ b.cond(fn, e.X, rhs, done)
+ short = NewConst(exact.MakeBool(false), t)
+
+ case token.LOR:
+ b.cond(fn, e.X, done, rhs)
+ short = NewConst(exact.MakeBool(true), t)
+ }
+
+ // Is rhs unreachable?
+ if rhs.Preds == nil {
+ // Simplify false&&y to false, true||y to true.
+ fn.currentBlock = done
+ return short
+ }
+
+ // Is done unreachable?
+ if done.Preds == nil {
+ // Simplify true&&y (or false||y) to y.
+ fn.currentBlock = rhs
+ return b.expr(fn, e.Y)
+ }
+
+ // All edges from e.X to done carry the short-circuit value.
+ var edges []Value
+ for _ = range done.Preds {
+ edges = append(edges, short)
+ }
+
+ // The edge from e.Y to done carries the value of e.Y.
+ fn.currentBlock = rhs
+ edges = append(edges, b.expr(fn, e.Y))
+ emitJump(fn, done)
+ fn.currentBlock = done
+
+ phi := &Phi{Edges: edges, Comment: e.Op.String()}
+ phi.pos = e.OpPos
+ phi.typ = t
+ return done.emit(phi)
+}
+
+// exprN lowers a multi-result expression e to SSA form, emitting code
+// to fn and returning a single Value whose type is a *types.Tuple.
+// The caller must access the components via Extract.
+//
+// Multi-result expressions include CallExprs in a multi-value
+// assignment or return statement, and "value,ok" uses of
+// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op
+// is token.ARROW).
+//
+func (b *builder) exprN(fn *Function, e ast.Expr) Value {
+ typ := fn.Pkg.typeOf(e).(*types.Tuple)
+ switch e := e.(type) {
+ case *ast.ParenExpr:
+ return b.exprN(fn, e.X)
+
+ case *ast.CallExpr:
+ // Currently, no built-in function nor type conversion
+ // has multiple results, so we can avoid some of the
+ // cases for single-valued CallExpr.
+ var c Call
+ b.setCall(fn, e, &c.Call)
+ c.typ = typ
+ return fn.emit(&c)
+
+ case *ast.IndexExpr:
+ mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
+ lookup := &Lookup{
+ X: b.expr(fn, e.X),
+ Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
+ CommaOk: true,
+ }
+ lookup.setType(typ)
+ lookup.setPos(e.Lbrack)
+ return fn.emit(lookup)
+
+ case *ast.TypeAssertExpr:
+ return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen)
+
+ case *ast.UnaryExpr: // must be receive <-
+ unop := &UnOp{
+ Op: token.ARROW,
+ X: b.expr(fn, e.X),
+ CommaOk: true,
+ }
+ unop.setType(typ)
+ unop.setPos(e.OpPos)
+ return fn.emit(unop)
+ }
+ panic(fmt.Sprintf("exprN(%T) in %s", e, fn))
+}
+
+// builtin emits to fn SSA instructions to implement a call to the
+// built-in function obj with the specified arguments
+// and return type. It returns the value defined by the result.
+//
+// The result is nil if no special handling was required; in this case
+// the caller should treat this like an ordinary library function
+// call.
+//
+func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value {
+ switch obj.Name() {
+ case "make":
+ switch typ.Underlying().(type) {
+ case *types.Slice:
+ n := b.expr(fn, args[1])
+ m := n
+ if len(args) == 3 {
+ m = b.expr(fn, args[2])
+ }
+ if m, ok := m.(*Const); ok {
+ // treat make([]T, n, m) as new([m]T)[:n]
+ cap := m.Int64()
+ at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap)
+ alloc := emitNew(fn, at, pos)
+ alloc.Comment = "makeslice"
+ v := &Slice{
+ X: alloc,
+ High: n,
+ }
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+ }
+ v := &MakeSlice{
+ Len: n,
+ Cap: m,
+ }
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+
+ case *types.Map:
+ var res Value
+ if len(args) == 2 {
+ res = b.expr(fn, args[1])
+ }
+ v := &MakeMap{Reserve: res}
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+
+ case *types.Chan:
+ var sz Value = vZero
+ if len(args) == 2 {
+ sz = b.expr(fn, args[1])
+ }
+ v := &MakeChan{Size: sz}
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+ }
+
+ case "new":
+ alloc := emitNew(fn, deref(typ), pos)
+ alloc.Comment = "new"
+ return alloc
+
+ case "len", "cap":
+ // Special case: len or cap of an array or *array is
+ // based on the type, not the value which may be nil.
+ // We must still evaluate the value, though. (If it
+ // was side-effect free, the whole call would have
+ // been constant-folded.)
+ t := deref(fn.Pkg.typeOf(args[0])).Underlying()
+ if at, ok := t.(*types.Array); ok {
+ b.expr(fn, args[0]) // for effects only
+ return intConst(at.Len())
+ }
+ // Otherwise treat as normal.
+
+ case "panic":
+ fn.emit(&Panic{
+ X: emitConv(fn, b.expr(fn, args[0]), tEface),
+ pos: pos,
+ })
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+ return vTrue // any non-nil Value will do
+ }
+ return nil // treat all others as a regular function call
+}
+
+// addr lowers a single-result addressable expression e to SSA form,
+// emitting code to fn and returning the location (an lvalue) defined
+// by the expression.
+//
+// If escaping is true, addr marks the base variable of the
+// addressable expression e as being a potentially escaping pointer
+// value. For example, in this code:
+//
+// a := A{
+// b: [1]B{B{c: 1}}
+// }
+// return &a.b[0].c
+//
+// the application of & causes a.b[0].c to have its address taken,
+// which means that ultimately the local variable a must be
+// heap-allocated. This is a simple but very conservative escape
+// analysis.
+//
+// Operations forming potentially escaping pointers include:
+// - &x, including when implicit in method call or composite literals.
+// - a[:] iff a is an array (not *array)
+// - references to variables in lexically enclosing functions.
+//
+func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
+ switch e := e.(type) {
+ case *ast.Ident:
+ if isBlankIdent(e) {
+ return blank{}
+ }
+ obj := fn.Pkg.objectOf(e)
+ v := fn.Prog.packageLevelValue(obj) // var (address)
+ if v == nil {
+ v = fn.lookup(obj, escaping)
+ }
+ return &address{addr: v, pos: e.Pos(), expr: e}
+
+ case *ast.CompositeLit:
+ t := deref(fn.Pkg.typeOf(e))
+ var v *Alloc
+ if escaping {
+ v = emitNew(fn, t, e.Lbrace)
+ } else {
+ v = fn.addLocal(t, e.Lbrace)
+ }
+ v.Comment = "complit"
+ var sb storebuf
+ b.compLit(fn, v, e, true, &sb)
+ sb.emit(fn)
+ return &address{addr: v, pos: e.Lbrace, expr: e}
+
+ case *ast.ParenExpr:
+ return b.addr(fn, e.X, escaping)
+
+ case *ast.SelectorExpr:
+ sel, ok := fn.Pkg.info.Selections[e]
+ if !ok {
+ // qualified identifier
+ return b.addr(fn, e.Sel, escaping)
+ }
+ if sel.Kind() != types.FieldVal {
+ panic(sel)
+ }
+ wantAddr := true
+ v := b.receiver(fn, e.X, wantAddr, escaping, sel)
+ last := len(sel.Index()) - 1
+ return &address{
+ addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel),
+ pos: e.Sel.Pos(),
+ expr: e.Sel,
+ }
+
+ case *ast.IndexExpr:
+ var x Value
+ var et types.Type
+ switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
+ case *types.Array:
+ x = b.addr(fn, e.X, escaping).address(fn)
+ et = types.NewPointer(t.Elem())
+ case *types.Pointer: // *array
+ x = b.expr(fn, e.X)
+ et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())
+ case *types.Slice:
+ x = b.expr(fn, e.X)
+ et = types.NewPointer(t.Elem())
+ case *types.Map:
+ return &element{
+ m: b.expr(fn, e.X),
+ k: emitConv(fn, b.expr(fn, e.Index), t.Key()),
+ t: t.Elem(),
+ pos: e.Lbrack,
+ }
+ default:
+ panic("unexpected container type in IndexExpr: " + t.String())
+ }
+ v := &IndexAddr{
+ X: x,
+ Index: emitConv(fn, b.expr(fn, e.Index), tInt),
+ }
+ v.setPos(e.Lbrack)
+ v.setType(et)
+ return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e}
+
+ case *ast.StarExpr:
+ return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e}
+ }
+
+ panic(fmt.Sprintf("unexpected address expression: %T", e))
+}
+
+type store struct {
+ lhs lvalue
+ rhs Value
+}
+
+type storebuf struct{ stores []store }
+
+func (sb *storebuf) store(lhs lvalue, rhs Value) {
+ sb.stores = append(sb.stores, store{lhs, rhs})
+}
+
+func (sb *storebuf) emit(fn *Function) {
+ for _, s := range sb.stores {
+ s.lhs.store(fn, s.rhs)
+ }
+}
+
+// assign emits to fn code to initialize the lvalue loc with the value
+// of expression e. If isZero is true, assign assumes that loc holds
+// the zero value for its type.
+//
+// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate
+// better code in some cases, e.g., for composite literals in an
+// addressable location.
+//
+// If sb is not nil, assign generates code to evaluate expression e, but
+// not to update loc. Instead, the necessary stores are appended to the
+// storebuf sb so that they can be executed later. This allows correct
+// in-place update of existing variables when the RHS is a composite
+// literal that may reference parts of the LHS.
+//
+func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) {
+ // Can we initialize it in place?
+ if e, ok := unparen(e).(*ast.CompositeLit); ok {
+ // A CompositeLit never evaluates to a pointer,
+ // so if the type of the location is a pointer,
+ // an &-operation is implied.
+ if _, ok := loc.(blank); !ok { // avoid calling blank.typ()
+ if isPointer(loc.typ()) {
+ ptr := b.addr(fn, e, true).address(fn)
+ // copy address
+ if sb != nil {
+ sb.store(loc, ptr)
+ } else {
+ loc.store(fn, ptr)
+ }
+ return
+ }
+ }
+
+ if _, ok := loc.(*address); ok {
+ if isInterface(loc.typ()) {
+ // e.g. var x interface{} = T{...}
+ // Can't in-place initialize an interface value.
+ // Fall back to copying.
+ } else {
+ // x = T{...} or x := T{...}
+ addr := loc.address(fn)
+ if sb != nil {
+ b.compLit(fn, addr, e, isZero, sb)
+ } else {
+ var sb storebuf
+ b.compLit(fn, addr, e, isZero, &sb)
+ sb.emit(fn)
+ }
+
+ // Subtle: emit debug ref for aggregate types only;
+ // slice and map are handled by store ops in compLit.
+ switch loc.typ().Underlying().(type) {
+ case *types.Struct, *types.Array:
+ emitDebugRef(fn, e, addr, true)
+ }
+
+ return
+ }
+ }
+ }
+
+ // simple case: just copy
+ rhs := b.expr(fn, e)
+ if sb != nil {
+ sb.store(loc, rhs)
+ } else {
+ loc.store(fn, rhs)
+ }
+}
+
+// expr lowers a single-result expression e to SSA form, emitting code
+// to fn and returning the Value defined by the expression.
+//
+func (b *builder) expr(fn *Function, e ast.Expr) Value {
+ e = unparen(e)
+
+ tv := fn.Pkg.info.Types[e]
+
+ // Is expression a constant?
+ if tv.Value != nil {
+ return NewConst(tv.Value, tv.Type)
+ }
+
+ var v Value
+ if tv.Addressable() {
+ // Prefer pointer arithmetic ({Index,Field}Addr) followed
+ // by Load over subelement extraction (e.g. Index, Field),
+ // to avoid large copies.
+ v = b.addr(fn, e, false).load(fn)
+ } else {
+ v = b.expr0(fn, e, tv)
+ }
+ if fn.debugInfo() {
+ emitDebugRef(fn, e, v, false)
+ }
+ return v
+}
+
+ // expr0 emits to fn code for the non-constant expression e and returns
+ // the resulting Value. It is the type-dispatch core behind expr();
+ // constants and debug refs are handled by the caller. tv carries the
+ // type checker's type (and value) info for e.
+ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
+ 	switch e := e.(type) {
+ 	case *ast.BasicLit:
+ 		panic("non-constant BasicLit") // unreachable
+ 
+ 	case *ast.FuncLit:
+ 		// Anonymous function: build it now, then wrap it in a
+ 		// MakeClosure only if it captures free variables.
+ 		fn2 := &Function{
+ 			name:      fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
+ 			Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature),
+ 			pos:       e.Type.Func,
+ 			parent:    fn,
+ 			Pkg:       fn.Pkg,
+ 			Prog:      fn.Prog,
+ 			syntax:    e,
+ 		}
+ 		fn.AnonFuncs = append(fn.AnonFuncs, fn2)
+ 		b.buildFunction(fn2)
+ 		if fn2.FreeVars == nil {
+ 			return fn2
+ 		}
+ 		v := &MakeClosure{Fn: fn2}
+ 		v.setType(tv.Type)
+ 		for _, fv := range fn2.FreeVars {
+ 			v.Bindings = append(v.Bindings, fv.outer)
+ 			fv.outer = nil // bindings transferred to the closure; clear link
+ 		}
+ 		return fn.emit(v)
+ 
+ 	case *ast.TypeAssertExpr: // single-result form only
+ 		return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen)
+ 
+ 	case *ast.CallExpr:
+ 		if fn.Pkg.info.Types[e.Fun].IsType() {
+ 			// Explicit type conversion, e.g. string(x) or big.Int(x)
+ 			x := b.expr(fn, e.Args[0])
+ 			y := emitConv(fn, x, tv.Type)
+ 			if y != x {
+ 				// Attribute the conversion instruction to the '(' position.
+ 				switch y := y.(type) {
+ 				case *Convert:
+ 					y.pos = e.Lparen
+ 				case *ChangeType:
+ 					y.pos = e.Lparen
+ 				case *MakeInterface:
+ 					y.pos = e.Lparen
+ 				}
+ 			}
+ 			return y
+ 		}
+ 		// Call to "intrinsic" built-ins, e.g. new, make, panic.
+ 		if id, ok := unparen(e.Fun).(*ast.Ident); ok {
+ 			if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok {
+ 				if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil {
+ 					return v
+ 				}
+ 			}
+ 		}
+ 		// Regular function call.
+ 		var v Call
+ 		b.setCall(fn, e, &v.Call)
+ 		v.setType(tv.Type)
+ 		return fn.emit(&v)
+ 
+ 	case *ast.UnaryExpr:
+ 		switch e.Op {
+ 		case token.AND: // &X --- potentially escaping.
+ 			addr := b.addr(fn, e.X, true)
+ 			if _, ok := unparen(e.X).(*ast.StarExpr); ok {
+ 				// &*p must panic if p is nil (http://golang.org/s/go12nil).
+ 				// For simplicity, we'll just (suboptimally) rely
+ 				// on the side effects of a load.
+ 				// TODO(adonovan): emit dedicated nilcheck.
+ 				addr.load(fn)
+ 			}
+ 			return addr.address(fn)
+ 		case token.ADD:
+ 			// Unary + is a no-op.
+ 			return b.expr(fn, e.X)
+ 		case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^
+ 			v := &UnOp{
+ 				Op: e.Op,
+ 				X:  b.expr(fn, e.X),
+ 			}
+ 			v.setPos(e.OpPos)
+ 			v.setType(tv.Type)
+ 			return fn.emit(v)
+ 		default:
+ 			panic(e.Op)
+ 		}
+ 
+ 	case *ast.BinaryExpr:
+ 		switch e.Op {
+ 		case token.LAND, token.LOR:
+ 			// Short-circuit operators need control flow, not a BinOp.
+ 			return b.logicalBinop(fn, e)
+ 		case token.SHL, token.SHR:
+ 			fallthrough
+ 		case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
+ 			return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos)
+ 
+ 		case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
+ 			cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos)
+ 			// The type of x==y may be UntypedBool.
+ 			return emitConv(fn, cmp, DefaultType(tv.Type))
+ 		default:
+ 			panic("illegal op in BinaryExpr: " + e.Op.String())
+ 		}
+ 
+ 	case *ast.SliceExpr:
+ 		var low, high, max Value
+ 		var x Value
+ 		switch fn.Pkg.typeOf(e.X).Underlying().(type) {
+ 		case *types.Array:
+ 			// Potentially escaping.
+ 			x = b.addr(fn, e.X, true).address(fn)
+ 		case *types.Basic, *types.Slice, *types.Pointer: // *array
+ 			x = b.expr(fn, e.X)
+ 		default:
+ 			panic("unreachable")
+ 		}
+ 		if e.High != nil {
+ 			high = b.expr(fn, e.High)
+ 		}
+ 		if e.Low != nil {
+ 			low = b.expr(fn, e.Low)
+ 		}
+ 		if e.Slice3 {
+ 			max = b.expr(fn, e.Max)
+ 		}
+ 		v := &Slice{
+ 			X:    x,
+ 			Low:  low,
+ 			High: high,
+ 			Max:  max,
+ 		}
+ 		v.setPos(e.Lbrack)
+ 		v.setType(tv.Type)
+ 		return fn.emit(v)
+ 
+ 	case *ast.Ident:
+ 		obj := fn.Pkg.info.Uses[e]
+ 		// Universal built-in or nil?
+ 		switch obj := obj.(type) {
+ 		case *types.Builtin:
+ 			return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
+ 		case *types.Nil:
+ 			return nilConst(tv.Type)
+ 		}
+ 		// Package-level func or var?
+ 		if v := fn.Prog.packageLevelValue(obj); v != nil {
+ 			if _, ok := obj.(*types.Var); ok {
+ 				return emitLoad(fn, v) // var (address)
+ 			}
+ 			return v // (func)
+ 		}
+ 		// Local var.
+ 		return emitLoad(fn, fn.lookup(obj, false)) // var (address)
+ 
+ 	case *ast.SelectorExpr:
+ 		sel, ok := fn.Pkg.info.Selections[e]
+ 		if !ok {
+ 			// qualified identifier
+ 			return b.expr(fn, e.Sel)
+ 		}
+ 		switch sel.Kind() {
+ 		case types.MethodExpr:
+ 			// (*T).f or T.f, the method f from the method-set of type T.
+ 			// The result is a "thunk".
+ 			return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type)
+ 
+ 		case types.MethodVal:
+ 			// e.f where e is an expression and f is a method.
+ 			// The result is a "bound".
+ 			obj := sel.Obj().(*types.Func)
+ 			rt := recvType(obj)
+ 			wantAddr := isPointer(rt)
+ 			escaping := true
+ 			v := b.receiver(fn, e.X, wantAddr, escaping, sel)
+ 			if isInterface(rt) {
+ 				// If v has interface type I,
+ 				// we must emit a check that v is non-nil.
+ 				// We use: typeassert v.(I).
+ 				emitTypeAssert(fn, v, rt, token.NoPos)
+ 			}
+ 			c := &MakeClosure{
+ 				Fn:       makeBound(fn.Prog, obj),
+ 				Bindings: []Value{v},
+ 			}
+ 			c.setPos(e.Sel.Pos())
+ 			c.setType(tv.Type)
+ 			return fn.emit(c)
+ 
+ 		case types.FieldVal:
+ 			// Field selection, possibly through embedded fields.
+ 			indices := sel.Index()
+ 			last := len(indices) - 1
+ 			v := b.expr(fn, e.X)
+ 			v = emitImplicitSelections(fn, v, indices[:last])
+ 			v = emitFieldSelection(fn, v, indices[last], false, e.Sel)
+ 			return v
+ 		}
+ 
+ 		panic("unexpected expression-relative selector")
+ 
+ 	case *ast.IndexExpr:
+ 		switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
+ 		case *types.Array:
+ 			// Non-addressable array (in a register).
+ 			v := &Index{
+ 				X:     b.expr(fn, e.X),
+ 				Index: emitConv(fn, b.expr(fn, e.Index), tInt),
+ 			}
+ 			v.setPos(e.Lbrack)
+ 			v.setType(t.Elem())
+ 			return fn.emit(v)
+ 
+ 		case *types.Map:
+ 			// Maps are not addressable.
+ 			mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
+ 			v := &Lookup{
+ 				X:     b.expr(fn, e.X),
+ 				Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
+ 			}
+ 			v.setPos(e.Lbrack)
+ 			v.setType(mapt.Elem())
+ 			return fn.emit(v)
+ 
+ 		case *types.Basic: // => string
+ 			// Strings are not addressable.
+ 			v := &Lookup{
+ 				X:     b.expr(fn, e.X),
+ 				Index: b.expr(fn, e.Index),
+ 			}
+ 			v.setPos(e.Lbrack)
+ 			v.setType(tByte)
+ 			return fn.emit(v)
+ 
+ 		case *types.Slice, *types.Pointer: // *array
+ 			// Addressable slice/array; use IndexAddr and Load.
+ 			return b.addr(fn, e, false).load(fn)
+ 
+ 		default:
+ 			panic("unexpected container type in IndexExpr: " + t.String())
+ 		}
+ 
+ 	case *ast.CompositeLit, *ast.StarExpr:
+ 		// Addressable types (lvalues)
+ 		return b.addr(fn, e, false).load(fn)
+ 	}
+ 
+ 	panic(fmt.Sprintf("unexpected expr: %T", e))
+ }
+
+ // stmtList emits to fn code for all statements in list, in order.
+ func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
+ 	for i := range list {
+ 		b.stmt(fn, list[i])
+ 	}
+ }
+
+ // receiver emits to fn code for expression e in the "receiver"
+ // position of selection e.f (where f may be a field or a method) and
+ // returns the effective receiver after applying the implicit field
+ // selections of sel.
+ //
+ // wantAddr requests that the result is an address. If
+ // !sel.Indirect(), this may require that e be built in addr() mode; it
+ // must thus be addressable.
+ //
+ // escaping is defined as per builder.addr().
+ //
+ func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value {
+ 	var v Value
+ 	if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) {
+ 		// Caller wants an address and e is not already a pointer:
+ 		// build e in addr() mode (e must be addressable).
+ 		v = b.addr(fn, e, escaping).address(fn)
+ 	} else {
+ 		v = b.expr(fn, e)
+ 	}
+ 
+ 	// Walk the implicit (embedded-field) selections, except the last.
+ 	last := len(sel.Index()) - 1
+ 	v = emitImplicitSelections(fn, v, sel.Index()[:last])
+ 	if !wantAddr && isPointer(v.Type()) {
+ 		v = emitLoad(fn, v)
+ 	}
+ 	return v
+ }
+
+ // setCallFunc populates the function parts of a CallCommon structure
+ // (Func, Method, Recv, Args[0]) based on the kind of invocation
+ // occurring in e.
+ //
+ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
+ 	c.pos = e.Lparen
+ 
+ 	// Is this a method call?
+ 	if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
+ 		sel, ok := fn.Pkg.info.Selections[selector]
+ 		if ok && sel.Kind() == types.MethodVal {
+ 			obj := sel.Obj().(*types.Func)
+ 			recv := recvType(obj)
+ 			wantAddr := isPointer(recv)
+ 			escaping := true
+ 			v := b.receiver(fn, selector.X, wantAddr, escaping, sel)
+ 			if isInterface(recv) {
+ 				// Invoke-mode call: dynamic dispatch through v.
+ 				c.Value = v
+ 				c.Method = obj
+ 			} else {
+ 				// "Call"-mode call: static target; receiver becomes
+ 				// the first actual argument.
+ 				c.Value = fn.Prog.declaredFunc(obj)
+ 				c.Args = append(c.Args, v)
+ 			}
+ 			return
+ 		}
+ 
+ 		// sel.Kind()==MethodExpr indicates T.f() or (*T).f():
+ 		// a statically dispatched call to the method f in the
+ 		// method-set of T or *T. T may be an interface.
+ 		//
+ 		// e.Fun would evaluate to a concrete method, interface
+ 		// wrapper function, or promotion wrapper.
+ 		//
+ 		// For now, we evaluate it in the usual way.
+ 		//
+ 		// TODO(adonovan): opt: inline expr() here, to make the
+ 		// call static and to avoid generation of wrappers.
+ 		// It's somewhat tricky as it may consume the first
+ 		// actual parameter if the call is "invoke" mode.
+ 		//
+ 		// Examples:
+ 		//  type T struct{}; func (T) f() {}   // "call" mode
+ 		//  type T interface { f() }           // "invoke" mode
+ 		//
+ 		//  type S struct{ T }
+ 		//
+ 		//  var s S
+ 		//  S.f(s)
+ 		//  (*S).f(&s)
+ 		//
+ 		// Suggested approach:
+ 		//  - consume the first actual parameter expression
+ 		//    and build it with b.expr().
+ 		//  - apply implicit field selections.
+ 		//  - use MethodVal logic to populate fields of c.
+ 	}
+ 
+ 	// Evaluate the function operand in the usual way.
+ 	c.Value = b.expr(fn, e.Fun)
+ }
+
+ // emitCallArgs emits to f code for the actual parameters of call e to
+ // a (possibly built-in) function of effective type sig.
+ // The argument values are appended to args, which is then returned.
+ //
+ func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
+ 	// f(x, y, z...): pass slice z straight through.
+ 	if e.Ellipsis != 0 {
+ 		for i, arg := range e.Args {
+ 			v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type())
+ 			args = append(args, v)
+ 		}
+ 		return args
+ 	}
+ 
+ 	offset := len(args) // 1 if call has receiver, 0 otherwise
+ 
+ 	// Evaluate actual parameter expressions.
+ 	//
+ 	// If this is a chained call of the form f(g()) where g has
+ 	// multiple return values (MRV), they are flattened out into
+ 	// args; a suffix of them may end up in a varargs slice.
+ 	for _, arg := range e.Args {
+ 		v := b.expr(fn, arg)
+ 		if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain
+ 			for i, n := 0, ttuple.Len(); i < n; i++ {
+ 				args = append(args, emitExtract(fn, v, i))
+ 			}
+ 		} else {
+ 			args = append(args, v)
+ 		}
+ 	}
+ 
+ 	// Actual->formal assignability conversions for normal parameters.
+ 	np := sig.Params().Len() // number of normal parameters
+ 	if sig.Variadic() {
+ 		np--
+ 	}
+ 	for i := 0; i < np; i++ {
+ 		args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type())
+ 	}
+ 
+ 	// Actual->formal assignability conversions for variadic parameter,
+ 	// and construction of slice.
+ 	if sig.Variadic() {
+ 		varargs := args[offset+np:]
+ 		st := sig.Params().At(np).Type().(*types.Slice)
+ 		vt := st.Elem()
+ 		if len(varargs) == 0 {
+ 			// No variadic actuals: pass a nil slice.
+ 			args = append(args, nilConst(st))
+ 		} else {
+ 			// Replace a suffix of args with a slice containing it.
+ 			at := types.NewArray(vt, int64(len(varargs)))
+ 			a := emitNew(fn, at, token.NoPos)
+ 			a.setPos(e.Rparen)
+ 			a.Comment = "varargs"
+ 			for i, arg := range varargs {
+ 				iaddr := &IndexAddr{
+ 					X:     a,
+ 					Index: intConst(int64(i)),
+ 				}
+ 				iaddr.setType(types.NewPointer(vt))
+ 				fn.emit(iaddr)
+ 				emitStore(fn, iaddr, arg, arg.Pos())
+ 			}
+ 			s := &Slice{X: a}
+ 			s.setType(st)
+ 			args[offset+np] = fn.emit(s)
+ 			args = args[:offset+np+1]
+ 		}
+ 	}
+ 	return args
+ }
+
+ // setCall emits to fn code to evaluate all the parameters of a function
+ // call e, and populates *c with those values.
+ //
+ func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
+ 	// First deal with the f(...) part and optional receiver.
+ 	b.setCallFunc(fn, e, c)
+ 
+ 	// Then append the other actual parameters.
+ 	sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature)
+ 	if sig == nil {
+ 		panic(fmt.Sprintf("no signature for call of %s", e.Fun))
+ 	}
+ 	c.Args = b.emitCallArgs(fn, sig, e, c.Args)
+ }
+
+ // assignOp emits to fn code to perform loc += incr or loc -= incr
+ // (and the other augmented assignment operators): it loads the current
+ // value, converts incr to that value's type, applies op, and stores
+ // the result back into loc.
+ func (b *builder) assignOp(fn *Function, loc lvalue, incr Value, op token.Token, pos token.Pos) {
+ 	cur := loc.load(fn)
+ 	rhs := emitConv(fn, incr, cur.Type())
+ 	loc.store(fn, emitArith(fn, op, cur, rhs, loc.typ(), pos))
+ }
+
+ // localValueSpec emits to fn code to define all of the vars in the
+ // function-local ValueSpec, spec.
+ //
+ func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
+ 	switch {
+ 	case len(spec.Values) == len(spec.Names):
+ 		// e.g. var x, y = 0, 1
+ 		// 1:1 assignment
+ 		for i, id := range spec.Names {
+ 			if !isBlankIdent(id) {
+ 				fn.addLocalForIdent(id)
+ 			}
+ 			lval := b.addr(fn, id, false) // non-escaping
+ 			b.assign(fn, lval, spec.Values[i], true, nil)
+ 		}
+ 
+ 	case len(spec.Values) == 0:
+ 		// e.g. var x, y int
+ 		// Locals are implicitly zero-initialized.
+ 		for _, id := range spec.Names {
+ 			if !isBlankIdent(id) {
+ 				lhs := fn.addLocalForIdent(id)
+ 				if fn.debugInfo() {
+ 					// Record the declaration for debuggers.
+ 					emitDebugRef(fn, id, lhs, true)
+ 				}
+ 			}
+ 		}
+ 
+ 	default:
+ 		// e.g. var x, y = pos()
+ 		// A single multi-valued RHS: extract each component.
+ 		tuple := b.exprN(fn, spec.Values[0])
+ 		for i, id := range spec.Names {
+ 			if !isBlankIdent(id) {
+ 				fn.addLocalForIdent(id)
+ 				lhs := b.addr(fn, id, false) // non-escaping
+ 				lhs.store(fn, emitExtract(fn, tuple, i))
+ 			}
+ 		}
+ 	}
+ }
+
+ // assignStmt emits code to fn for a parallel assignment of rhss to lhss.
+ // isDef is true if this is a short variable declaration (:=).
+ //
+ // Note the similarity with localValueSpec.
+ //
+ func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) {
+ 	// Side effects of all LHSs and RHSs must occur in left-to-right order.
+ 	lvals := make([]lvalue, len(lhss))
+ 	isZero := make([]bool, len(lhss))
+ 	for i, lhs := range lhss {
+ 		var lval lvalue = blank{}
+ 		if !isBlankIdent(lhs) {
+ 			if isDef {
+ 				// A := may declare a new local; a nil Defs entry
+ 				// means the identifier redeclares an existing var.
+ 				if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil {
+ 					fn.addNamedLocal(obj)
+ 					isZero[i] = true
+ 				}
+ 			}
+ 			lval = b.addr(fn, lhs, false) // non-escaping
+ 		}
+ 		lvals[i] = lval
+ 	}
+ 	if len(lhss) == len(rhss) {
+ 		// Simple assignment:   x     = f()        (!isDef)
+ 		// Parallel assignment: x, y  = f(), g()   (!isDef)
+ 		// or short var decl:   x, y := f(), g()   (isDef)
+ 		//
+ 		// In all cases, the RHSs may refer to the LHSs,
+ 		// so we need a storebuf.
+ 		var sb storebuf
+ 		for i := range rhss {
+ 			b.assign(fn, lvals[i], rhss[i], isZero[i], &sb)
+ 		}
+ 		sb.emit(fn)
+ 	} else {
+ 		// e.g. x, y = pos()
+ 		tuple := b.exprN(fn, rhss[0])
+ 		emitDebugRef(fn, rhss[0], tuple, false)
+ 		for i, lval := range lvals {
+ 			lval.store(fn, emitExtract(fn, tuple, i))
+ 		}
+ 	}
+ }
+
+// arrayLen returns the length of the array whose composite literal elements are elts.
+func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
+ var max int64 = -1
+ var i int64 = -1
+ for _, e := range elts {
+ if kv, ok := e.(*ast.KeyValueExpr); ok {
+ i = b.expr(fn, kv.Key).(*Const).Int64()
+ } else {
+ i++
+ }
+ if i > max {
+ max = i
+ }
+ }
+ return max + 1
+}
+
+ // compLit emits to fn code to initialize a composite literal e at
+ // address addr with type typ.
+ //
+ // Nested composite literals are recursively initialized in place
+ // where possible. If isZero is true, compLit assumes that addr
+ // holds the zero value for typ.
+ //
+ // Because the elements of a composite literal may refer to the
+ // variables being updated, as in the second line below,
+ //	x := T{a: 1}
+ //	x = T{a: x.a}
+ // all the reads must occur before all the writes. Thus all stores to
+ // loc are emitted to the storebuf sb for later execution.
+ //
+ // A CompositeLit may have pointer type only in the recursive (nested)
+ // case when the type name is implicit. e.g. in []*T{{}}, the inner
+ // literal has type *T behaves like &T{}.
+ // In that case, addr must hold a T, not a *T.
+ //
+ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
+ 	typ := deref(fn.Pkg.typeOf(e))
+ 	switch t := typ.Underlying().(type) {
+ 	case *types.Struct:
+ 		if !isZero && len(e.Elts) != t.NumFields() {
+ 			// memclear: not every field is initialized by the
+ 			// literal, so zero the whole struct first.
+ 			sb.store(&address{addr, e.Lbrace, nil},
+ 				zeroValue(fn, deref(addr.Type())))
+ 			isZero = true
+ 		}
+ 		for i, e := range e.Elts {
+ 			fieldIndex := i
+ 			pos := e.Pos()
+ 			if kv, ok := e.(*ast.KeyValueExpr); ok {
+ 				// Keyed field: resolve the field index by name.
+ 				fname := kv.Key.(*ast.Ident).Name
+ 				for i, n := 0, t.NumFields(); i < n; i++ {
+ 					sf := t.Field(i)
+ 					if sf.Name() == fname {
+ 						fieldIndex = i
+ 						pos = kv.Colon
+ 						e = kv.Value
+ 						break
+ 					}
+ 				}
+ 			}
+ 			sf := t.Field(fieldIndex)
+ 			faddr := &FieldAddr{
+ 				X:     addr,
+ 				Field: fieldIndex,
+ 			}
+ 			faddr.setType(types.NewPointer(sf.Type()))
+ 			fn.emit(faddr)
+ 			b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb)
+ 		}
+ 
+ 	case *types.Array, *types.Slice:
+ 		var at *types.Array
+ 		var array Value
+ 		switch t := t.(type) {
+ 		case *types.Slice:
+ 			// A slice literal is built in a fresh backing array.
+ 			at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
+ 			alloc := emitNew(fn, at, e.Lbrace)
+ 			alloc.Comment = "slicelit"
+ 			array = alloc
+ 		case *types.Array:
+ 			at = t
+ 			array = addr
+ 
+ 			if !isZero && int64(len(e.Elts)) != at.Len() {
+ 				// memclear
+ 				sb.store(&address{array, e.Lbrace, nil},
+ 					zeroValue(fn, deref(array.Type())))
+ 			}
+ 		}
+ 
+ 		var idx *Const
+ 		for _, e := range e.Elts {
+ 			pos := e.Pos()
+ 			if kv, ok := e.(*ast.KeyValueExpr); ok {
+ 				idx = b.expr(fn, kv.Key).(*Const)
+ 				pos = kv.Colon
+ 				e = kv.Value
+ 			} else {
+ 				// Unkeyed element: next index after the previous one.
+ 				var idxval int64
+ 				if idx != nil {
+ 					idxval = idx.Int64() + 1
+ 				}
+ 				idx = intConst(idxval)
+ 			}
+ 			iaddr := &IndexAddr{
+ 				X:     array,
+ 				Index: idx,
+ 			}
+ 			iaddr.setType(types.NewPointer(at.Elem()))
+ 			fn.emit(iaddr)
+ 			if t != at { // slice
+ 				// backing array is unaliased => storebuf not needed.
+ 				b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil)
+ 			} else {
+ 				b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb)
+ 			}
+ 		}
+ 
+ 		if t != at { // slice
+ 			// Wrap the backing array in a Slice and store it (deferred).
+ 			s := &Slice{X: array}
+ 			s.setPos(e.Lbrace)
+ 			s.setType(typ)
+ 			sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s))
+ 		}
+ 
+ 	case *types.Map:
+ 		m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))}
+ 		m.setPos(e.Lbrace)
+ 		m.setType(typ)
+ 		fn.emit(m)
+ 		for _, e := range e.Elts {
+ 			e := e.(*ast.KeyValueExpr)
+ 
+ 			// If a key expression in a map literal is itself a
+ 			// composite literal, the type may be omitted.
+ 			// For example:
+ 			//	map[*struct{}]bool{{}: true}
+ 			// An &-operation may be implied:
+ 			//	map[*struct{}]bool{&struct{}{}: true}
+ 			var key Value
+ 			if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) {
+ 				// A CompositeLit never evaluates to a pointer,
+ 				// so if the type of the location is a pointer,
+ 				// an &-operation is implied.
+ 				key = b.addr(fn, e.Key, true).address(fn)
+ 			} else {
+ 				key = b.expr(fn, e.Key)
+ 			}
+ 
+ 			loc := element{
+ 				m:   m,
+ 				k:   emitConv(fn, key, t.Key()),
+ 				t:   t.Elem(),
+ 				pos: e.Colon,
+ 			}
+ 
+ 			// We call assign() only because it takes care
+ 			// of any &-operation required in the recursive
+ 			// case, e.g.,
+ 			// map[int]*struct{}{0: {}} implies &struct{}{}.
+ 			// In-place update is of course impossible,
+ 			// and no storebuf is needed.
+ 			b.assign(fn, &loc, e.Value, true, nil)
+ 		}
+ 		sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m)
+ 
+ 	default:
+ 		panic("unexpected CompositeLit type: " + t.String())
+ 	}
+ }
+
+ // switchStmt emits to fn code for the switch statement s, optionally
+ // labelled by label.
+ //
+ func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
+ 	// We treat SwitchStmt like a sequential if-else chain.
+ 	// Multiway dispatch can be recovered later by ssautil.Switches()
+ 	// to those cases that are free of side effects.
+ 	if s.Init != nil {
+ 		b.stmt(fn, s.Init)
+ 	}
+ 	// A tagless switch is equivalent to "switch true {...}".
+ 	var tag Value = vTrue
+ 	if s.Tag != nil {
+ 		tag = b.expr(fn, s.Tag)
+ 	}
+ 	done := fn.newBasicBlock("switch.done")
+ 	if label != nil {
+ 		label._break = done
+ 	}
+ 	// We pull the default case (if present) down to the end.
+ 	// But each fallthrough label must point to the next
+ 	// body block in source order, so we preallocate a
+ 	// body block (fallthru) for the next case.
+ 	// Unfortunately this makes for a confusing block order.
+ 	var dfltBody *[]ast.Stmt
+ 	var dfltFallthrough *BasicBlock
+ 	var fallthru, dfltBlock *BasicBlock
+ 	ncases := len(s.Body.List)
+ 	for i, clause := range s.Body.List {
+ 		body := fallthru
+ 		if body == nil {
+ 			body = fn.newBasicBlock("switch.body") // first case only
+ 		}
+ 
+ 		// Preallocate body block for the next case.
+ 		fallthru = done
+ 		if i+1 < ncases {
+ 			fallthru = fn.newBasicBlock("switch.body")
+ 		}
+ 
+ 		cc := clause.(*ast.CaseClause)
+ 		if cc.List == nil {
+ 			// Default case.
+ 			dfltBody = &cc.Body
+ 			dfltFallthrough = fallthru
+ 			dfltBlock = body
+ 			continue
+ 		}
+ 
+ 		var nextCond *BasicBlock
+ 		for _, cond := range cc.List {
+ 			nextCond = fn.newBasicBlock("switch.next")
+ 			// TODO(adonovan): opt: when tag==vTrue, we'd
+ 			// get better code if we use b.cond(cond)
+ 			// instead of BinOp(EQL, tag, b.expr(cond))
+ 			// followed by If.  Don't forget conversions
+ 			// though.
+ 			cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos)
+ 			emitIf(fn, cond, body, nextCond)
+ 			fn.currentBlock = nextCond
+ 		}
+ 		fn.currentBlock = body
+ 		fn.targets = &targets{
+ 			tail:         fn.targets,
+ 			_break:       done,
+ 			_fallthrough: fallthru,
+ 		}
+ 		b.stmtList(fn, cc.Body)
+ 		fn.targets = fn.targets.tail
+ 		emitJump(fn, done)
+ 		fn.currentBlock = nextCond
+ 	}
+ 	if dfltBlock != nil {
+ 		// Emit the deferred default case last.
+ 		emitJump(fn, dfltBlock)
+ 		fn.currentBlock = dfltBlock
+ 		fn.targets = &targets{
+ 			tail:         fn.targets,
+ 			_break:       done,
+ 			_fallthrough: dfltFallthrough,
+ 		}
+ 		b.stmtList(fn, *dfltBody)
+ 		fn.targets = fn.targets.tail
+ 	}
+ 	emitJump(fn, done)
+ 	fn.currentBlock = done
+ }
+
+ // typeSwitchStmt emits to fn code for the type switch statement s, optionally
+ // labelled by label.
+ //
+ func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
+ 	// We treat TypeSwitchStmt like a sequential if-else chain.
+ 	// Multiway dispatch can be recovered later by ssautil.Switches().
+ 
+ 	// Typeswitch lowering:
+ 	//
+ 	// var x X
+ 	// switch y := x.(type) {
+ 	// case T1, T2: S1                  // >1 	(y := x)
+ 	// case nil:    SN                  // nil 	(y := x)
+ 	// default:     SD                  // 0 types 	(y := x)
+ 	// case T3:     S3                  // 1 type 	(y := x.(T3))
+ 	// }
+ 	//
+ 	//      ...s.Init...
+ 	// 	x := eval x
+ 	// .caseT1:
+ 	// 	t1, ok1 := typeswitch,ok x <T1>
+ 	// 	if ok1 then goto S1 else goto .caseT2
+ 	// .caseT2:
+ 	// 	t2, ok2 := typeswitch,ok x <T2>
+ 	// 	if ok2 then goto S1 else goto .caseNil
+ 	// .S1:
+ 	//      y := x
+ 	// 	...S1...
+ 	// 	goto done
+ 	// .caseNil:
+ 	// 	if t2, ok2 := typeswitch,ok x <T2>
+ 	// 	if x == nil then goto SN else goto .caseT3
+ 	// .SN:
+ 	//      y := x
+ 	// 	...SN...
+ 	// 	goto done
+ 	// .caseT3:
+ 	// 	t3, ok3 := typeswitch,ok x <T3>
+ 	// 	if ok3 then goto S3 else goto default
+ 	// .S3:
+ 	//      y := t3
+ 	// 	...S3...
+ 	// 	goto done
+ 	// .default:
+ 	//      y := x
+ 	// 	...SD...
+ 	// 	goto done
+ 	// .done:
+ 
+ 	if s.Init != nil {
+ 		b.stmt(fn, s.Init)
+ 	}
+ 
+ 	// Evaluate the switch operand x once, up front.
+ 	var x Value
+ 	switch ass := s.Assign.(type) {
+ 	case *ast.ExprStmt: // x.(type)
+ 		x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X)
+ 	case *ast.AssignStmt: // y := x.(type)
+ 		x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X)
+ 	}
+ 
+ 	done := fn.newBasicBlock("typeswitch.done")
+ 	if label != nil {
+ 		label._break = done
+ 	}
+ 	var default_ *ast.CaseClause
+ 	for _, clause := range s.Body.List {
+ 		cc := clause.(*ast.CaseClause)
+ 		if cc.List == nil {
+ 			// Default case: handled after all typed cases.
+ 			default_ = cc
+ 			continue
+ 		}
+ 		body := fn.newBasicBlock("typeswitch.body")
+ 		var next *BasicBlock
+ 		var casetype types.Type
+ 		var ti Value // ti, ok := typeassert,ok x <Ti>
+ 		for _, cond := range cc.List {
+ 			next = fn.newBasicBlock("typeswitch.next")
+ 			casetype = fn.Pkg.typeOf(cond)
+ 			var condv Value
+ 			if casetype == tUntypedNil {
+ 				// "case nil" compares x against the nil interface.
+ 				condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos)
+ 				ti = x
+ 			} else {
+ 				yok := emitTypeTest(fn, x, casetype, cc.Case)
+ 				ti = emitExtract(fn, yok, 0)
+ 				condv = emitExtract(fn, yok, 1)
+ 			}
+ 			emitIf(fn, condv, body, next)
+ 			fn.currentBlock = next
+ 		}
+ 		if len(cc.List) != 1 {
+ 			// Multi-type case: y keeps the interface type of x.
+ 			ti = x
+ 		}
+ 		fn.currentBlock = body
+ 		b.typeCaseBody(fn, cc, ti, done)
+ 		fn.currentBlock = next
+ 	}
+ 	if default_ != nil {
+ 		b.typeCaseBody(fn, default_, x, done)
+ 	} else {
+ 		emitJump(fn, done)
+ 	}
+ 	fn.currentBlock = done
+ }
+
+ // typeCaseBody emits to fn the body of one case clause cc of a type
+ // switch, binding the implicitly declared variable (if any) to x and
+ // routing 'break' to done.
+ func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) {
+ 	if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
+ 		// In a switch y := x.(type), each case clause
+ 		// implicitly declares a distinct object y.
+ 		// In a single-type case, y has that type.
+ 		// In multi-type cases, 'case nil' and default,
+ 		// y has the same type as the interface operand.
+ 		emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos())
+ 	}
+ 	fn.targets = &targets{
+ 		tail:   fn.targets,
+ 		_break: done,
+ 	}
+ 	b.stmtList(fn, cc.Body)
+ 	fn.targets = fn.targets.tail
+ 	emitJump(fn, done)
+ }
+
+ // selectStmt emits to fn code for the select statement s, optionally
+ // labelled by label.
+ //
+ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
+ 	// A blocking select of a single case degenerates to a
+ 	// simple send or receive.
+ 	// TODO(adonovan): opt: is this optimization worth its weight?
+ 	if len(s.Body.List) == 1 {
+ 		clause := s.Body.List[0].(*ast.CommClause)
+ 		if clause.Comm != nil {
+ 			b.stmt(fn, clause.Comm)
+ 			done := fn.newBasicBlock("select.done")
+ 			if label != nil {
+ 				label._break = done
+ 			}
+ 			fn.targets = &targets{
+ 				tail:   fn.targets,
+ 				_break: done,
+ 			}
+ 			b.stmtList(fn, clause.Body)
+ 			fn.targets = fn.targets.tail
+ 			emitJump(fn, done)
+ 			fn.currentBlock = done
+ 			return
+ 		}
+ 	}
+ 
+ 	// First evaluate all channels in all cases, and find
+ 	// the directions of each state.
+ 	var states []*SelectState
+ 	blocking := true
+ 	debugInfo := fn.debugInfo()
+ 	for _, clause := range s.Body.List {
+ 		var st *SelectState
+ 		switch comm := clause.(*ast.CommClause).Comm.(type) {
+ 		case nil: // default case
+ 			blocking = false
+ 			continue
+ 
+ 		case *ast.SendStmt: // ch<- i
+ 			ch := b.expr(fn, comm.Chan)
+ 			st = &SelectState{
+ 				Dir:  types.SendOnly,
+ 				Chan: ch,
+ 				Send: emitConv(fn, b.expr(fn, comm.Value),
+ 					ch.Type().Underlying().(*types.Chan).Elem()),
+ 				Pos: comm.Arrow,
+ 			}
+ 			if debugInfo {
+ 				st.DebugNode = comm
+ 			}
+ 
+ 		case *ast.AssignStmt: // x := <-ch
+ 			recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr)
+ 			st = &SelectState{
+ 				Dir:  types.RecvOnly,
+ 				Chan: b.expr(fn, recv.X),
+ 				Pos:  recv.OpPos,
+ 			}
+ 			if debugInfo {
+ 				st.DebugNode = recv
+ 			}
+ 
+ 		case *ast.ExprStmt: // <-ch
+ 			recv := unparen(comm.X).(*ast.UnaryExpr)
+ 			st = &SelectState{
+ 				Dir:  types.RecvOnly,
+ 				Chan: b.expr(fn, recv.X),
+ 				Pos:  recv.OpPos,
+ 			}
+ 			if debugInfo {
+ 				st.DebugNode = recv
+ 			}
+ 		}
+ 		states = append(states, st)
+ 	}
+ 
+ 	// We dispatch on the (fair) result of Select using a
+ 	// sequential if-else chain, in effect:
+ 	//
+ 	// idx, recvOk, r0...r_n-1 := select(...)
+ 	// if idx == 0 {  // receive on channel 0  (first receive => r0)
+ 	//     x, ok := r0, recvOk
+ 	//     ...state0...
+ 	// } else if v == 1 {   // send on channel 1
+ 	//     ...state1...
+ 	// } else {
+ 	//     ...default...
+ 	// }
+ 	sel := &Select{
+ 		States:   states,
+ 		Blocking: blocking,
+ 	}
+ 	sel.setPos(s.Select)
+ 	// The Select instruction returns a tuple: (index, recvOk,
+ 	// one value per receive-direction state).
+ 	var vars []*types.Var
+ 	vars = append(vars, varIndex, varOk)
+ 	for _, st := range states {
+ 		if st.Dir == types.RecvOnly {
+ 			tElem := st.Chan.Type().Underlying().(*types.Chan).Elem()
+ 			vars = append(vars, anonVar(tElem))
+ 		}
+ 	}
+ 	sel.setType(types.NewTuple(vars...))
+ 
+ 	fn.emit(sel)
+ 	idx := emitExtract(fn, sel, 0)
+ 
+ 	done := fn.newBasicBlock("select.done")
+ 	if label != nil {
+ 		label._break = done
+ 	}
+ 
+ 	var defaultBody *[]ast.Stmt
+ 	state := 0
+ 	r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
+ 	for _, cc := range s.Body.List {
+ 		clause := cc.(*ast.CommClause)
+ 		if clause.Comm == nil {
+ 			defaultBody = &clause.Body
+ 			continue
+ 		}
+ 		body := fn.newBasicBlock("select.body")
+ 		next := fn.newBasicBlock("select.next")
+ 		emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next)
+ 		fn.currentBlock = body
+ 		fn.targets = &targets{
+ 			tail:   fn.targets,
+ 			_break: done,
+ 		}
+ 		switch comm := clause.Comm.(type) {
+ 		case *ast.ExprStmt: // <-ch
+ 			if debugInfo {
+ 				v := emitExtract(fn, sel, r)
+ 				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
+ 			}
+ 			r++
+ 
+ 		case *ast.AssignStmt: // x := <-states[state].Chan
+ 			if comm.Tok == token.DEFINE {
+ 				fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident))
+ 			}
+ 			x := b.addr(fn, comm.Lhs[0], false) // non-escaping
+ 			v := emitExtract(fn, sel, r)
+ 			if debugInfo {
+ 				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
+ 			}
+ 			x.store(fn, v)
+ 
+ 			if len(comm.Lhs) == 2 { // x, ok := ...
+ 				if comm.Tok == token.DEFINE {
+ 					fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident))
+ 				}
+ 				ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
+ 				ok.store(fn, emitExtract(fn, sel, 1))
+ 			}
+ 			r++
+ 		}
+ 		b.stmtList(fn, clause.Body)
+ 		fn.targets = fn.targets.tail
+ 		emitJump(fn, done)
+ 		fn.currentBlock = next
+ 		state++
+ 	}
+ 	if defaultBody != nil {
+ 		fn.targets = &targets{
+ 			tail:   fn.targets,
+ 			_break: done,
+ 		}
+ 		b.stmtList(fn, *defaultBody)
+ 		fn.targets = fn.targets.tail
+ 	} else {
+ 		// A blocking select must match some case.
+ 		// (This should really be a runtime.errorString, not a string.)
+ 		fn.emit(&Panic{
+ 			X: emitConv(fn, stringConst("blocking select matched no case"), tEface),
+ 		})
+ 		fn.currentBlock = fn.newBasicBlock("unreachable")
+ 	}
+ 	emitJump(fn, done)
+ 	fn.currentBlock = done
+ }
+
+ // forStmt emits to fn code for the for statement s, optionally
+ // labelled by label.
+ //
+ func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
+ 	//	...init...
+ 	//      jump loop
+ 	// loop:
+ 	//      if cond goto body else done
+ 	// body:
+ 	//      ...body...
+ 	//      jump post
+ 	// post:				 (target of continue)
+ 	//      ...post...
+ 	//      jump loop
+ 	// done:                                 (target of break)
+ 	if s.Init != nil {
+ 		b.stmt(fn, s.Init)
+ 	}
+ 	body := fn.newBasicBlock("for.body")
+ 	done := fn.newBasicBlock("for.done") // target of 'break'
+ 	loop := body                         // target of back-edge
+ 	if s.Cond != nil {
+ 		// Only a conditional loop needs a distinct header block.
+ 		loop = fn.newBasicBlock("for.loop")
+ 	}
+ 	cont := loop // target of 'continue'
+ 	if s.Post != nil {
+ 		cont = fn.newBasicBlock("for.post")
+ 	}
+ 	if label != nil {
+ 		label._break = done
+ 		label._continue = cont
+ 	}
+ 	emitJump(fn, loop)
+ 	fn.currentBlock = loop
+ 	if loop != body {
+ 		b.cond(fn, s.Cond, body, done)
+ 		fn.currentBlock = body
+ 	}
+ 	fn.targets = &targets{
+ 		tail:      fn.targets,
+ 		_break:    done,
+ 		_continue: cont,
+ 	}
+ 	b.stmt(fn, s.Body)
+ 	fn.targets = fn.targets.tail
+ 	emitJump(fn, cont)
+ 
+ 	if s.Post != nil {
+ 		fn.currentBlock = cont
+ 		b.stmt(fn, s.Post)
+ 		emitJump(fn, loop) // back-edge
+ 	}
+ 	fn.currentBlock = done
+ }
+
+ // rangeIndexed emits to fn the header for an integer-indexed loop
+ // over array, *array or slice value x.
+ // The v result is defined only if tv is non-nil.
+ // forPos is the position of the "for" token.
+ //
+ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
+ 	//
+ 	//      length = len(x)
+ 	//      index = -1
+ 	// loop:                                   (target of continue)
+ 	//      index++
+ 	// 	if index < length goto body else done
+ 	// body:
+ 	//      k = index
+ 	//      v = x[index]
+ 	//      ...body...
+ 	// 	jump loop
+ 	// done:                                   (target of break)
+ 
+ 	// Determine number of iterations.
+ 	var length Value
+ 	if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok {
+ 		// For array or *array, the number of iterations is
+ 		// known statically thanks to the type.  We avoid a
+ 		// data dependence upon x, permitting later dead-code
+ 		// elimination if x is pure, static unrolling, etc.
+ 		// Ranging over a nil *array may have >0 iterations.
+ 		// We still generate code for x, in case it has effects.
+ 		length = intConst(arr.Len())
+ 	} else {
+ 		// length = len(x).
+ 		var c Call
+ 		c.Call.Value = makeLen(x.Type())
+ 		c.Call.Args = []Value{x}
+ 		c.setType(tInt)
+ 		length = fn.emit(&c)
+ 	}
+ 
+ 	// index starts at -1 so the first increment yields 0.
+ 	index := fn.addLocal(tInt, token.NoPos)
+ 	emitStore(fn, index, intConst(-1), pos)
+ 
+ 	loop = fn.newBasicBlock("rangeindex.loop")
+ 	emitJump(fn, loop)
+ 	fn.currentBlock = loop
+ 
+ 	incr := &BinOp{
+ 		Op: token.ADD,
+ 		X:  emitLoad(fn, index),
+ 		Y:  vOne,
+ 	}
+ 	incr.setType(tInt)
+ 	emitStore(fn, index, fn.emit(incr), pos)
+ 
+ 	body := fn.newBasicBlock("rangeindex.body")
+ 	done = fn.newBasicBlock("rangeindex.done")
+ 	emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done)
+ 	fn.currentBlock = body
+ 
+ 	k = emitLoad(fn, index)
+ 	if tv != nil {
+ 		// Emit the element access appropriate to x's container kind.
+ 		switch t := x.Type().Underlying().(type) {
+ 		case *types.Array:
+ 			instr := &Index{
+ 				X:     x,
+ 				Index: k,
+ 			}
+ 			instr.setType(t.Elem())
+ 			v = fn.emit(instr)
+ 
+ 		case *types.Pointer: // *array
+ 			instr := &IndexAddr{
+ 				X:     x,
+ 				Index: k,
+ 			}
+ 			instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()))
+ 			v = emitLoad(fn, fn.emit(instr))
+ 
+ 		case *types.Slice:
+ 			instr := &IndexAddr{
+ 				X:     x,
+ 				Index: k,
+ 			}
+ 			instr.setType(types.NewPointer(t.Elem()))
+ 			v = emitLoad(fn, fn.emit(instr))
+ 
+ 		default:
+ 			panic("rangeIndexed x:" + t.String())
+ 		}
+ 	}
+ 	return
+ }
+
+ // rangeIter emits to fn the header for a loop using
+ // Range/Next/Extract to iterate over map or string value x.
+ // tk and tv are the types of the key/value results k and v, or nil
+ // if the respective component is not wanted.
+ //
+ func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
+ 	//
+ 	//	it = range x
+ 	// loop:                                   (target of continue)
+ 	//	okv = next it                      (ok, key, value)
+ 	//  	ok = extract okv #0
+ 	// 	if ok goto body else done
+ 	// body:
+ 	// 	k = extract okv #1
+ 	// 	v = extract okv #2
+ 	//      ...body...
+ 	// 	jump loop
+ 	// done:                                   (target of break)
+ 	//
+ 
+ 	// Unwanted components get the invalid type as placeholder.
+ 	if tk == nil {
+ 		tk = tInvalid
+ 	}
+ 	if tv == nil {
+ 		tv = tInvalid
+ 	}
+ 
+ 	rng := &Range{X: x}
+ 	rng.setPos(pos)
+ 	rng.setType(tRangeIter)
+ 	it := fn.emit(rng)
+ 
+ 	loop = fn.newBasicBlock("rangeiter.loop")
+ 	emitJump(fn, loop)
+ 	fn.currentBlock = loop
+ 
+ 	_, isString := x.Type().Underlying().(*types.Basic)
+ 
+ 	okv := &Next{
+ 		Iter:     it,
+ 		IsString: isString,
+ 	}
+ 	okv.setType(types.NewTuple(
+ 		varOk,
+ 		newVar("k", tk),
+ 		newVar("v", tv),
+ 	))
+ 	fn.emit(okv)
+ 
+ 	body := fn.newBasicBlock("rangeiter.body")
+ 	done = fn.newBasicBlock("rangeiter.done")
+ 	emitIf(fn, emitExtract(fn, okv, 0), body, done)
+ 	fn.currentBlock = body
+ 
+ 	if tk != tInvalid {
+ 		k = emitExtract(fn, okv, 1)
+ 	}
+ 	if tv != tInvalid {
+ 		v = emitExtract(fn, okv, 2)
+ 	}
+ 	return
+ }
+
// rangeChan emits to fn the header for a loop that receives from
// channel x until it fails.
// tk is the channel's element type, or nil if the k result is
// not wanted
// pos is the position of the '=' or ':=' token.
//
// It returns the received element k (nil if unwanted), the loop-header
// block (target of continue) and the done block (target of break).
func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
	//
	// loop: (target of continue)
	// ko = <-x (key, ok)
	// ok = extract ko #1
	// if ok goto body else done
	// body:
	// k = extract ko #0
	// ...
	// goto loop
	// done: (target of break)

	loop = fn.newBasicBlock("rangechan.loop")
	emitJump(fn, loop)
	fn.currentBlock = loop
	// Comma-ok receive: ok becomes false when the channel is closed
	// and drained, terminating the loop.
	recv := &UnOp{
		Op:      token.ARROW,
		X:       x,
		CommaOk: true,
	}
	recv.setPos(pos)
	recv.setType(types.NewTuple(
		newVar("k", x.Type().Underlying().(*types.Chan).Elem()),
		varOk,
	))
	ko := fn.emit(recv)
	body := fn.newBasicBlock("rangechan.body")
	done = fn.newBasicBlock("rangechan.done")
	emitIf(fn, emitExtract(fn, ko, 1), body, done)
	fn.currentBlock = body
	if tk != nil {
		k = emitExtract(fn, ko, 0)
	}
	return
}
+
// rangeStmt emits to fn code for the range statement s, optionally
// labelled by label.
//
// It dispatches on the type of the ranged operand to one of the
// rangeIndexed/rangeChan/rangeIter helpers, stores the loop variables,
// emits the body, and wires up break/continue targets.
func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
	var tk, tv types.Type
	if s.Key != nil && !isBlankIdent(s.Key) {
		tk = fn.Pkg.typeOf(s.Key)
	}
	if s.Value != nil && !isBlankIdent(s.Value) {
		tv = fn.Pkg.typeOf(s.Value)
	}

	// If iteration variables are defined (:=), this
	// occurs once outside the loop.
	//
	// Unlike a short variable declaration, a RangeStmt
	// using := never redeclares an existing variable; it
	// always creates a new one.
	if s.Tok == token.DEFINE {
		if tk != nil {
			fn.addLocalForIdent(s.Key.(*ast.Ident))
		}
		if tv != nil {
			fn.addLocalForIdent(s.Value.(*ast.Ident))
		}
	}

	x := b.expr(fn, s.X)

	var k, v Value
	var loop, done *BasicBlock
	switch rt := x.Type().Underlying().(type) {
	case *types.Slice, *types.Array, *types.Pointer: // *array
		k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For)

	case *types.Chan:
		k, loop, done = b.rangeChan(fn, x, tk, s.For)

	case *types.Map, *types.Basic: // string
		k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For)

	default:
		panic("Cannot range over: " + rt.String())
	}

	// Evaluate both LHS expressions before we update either.
	var kl, vl lvalue
	if tk != nil {
		kl = b.addr(fn, s.Key, false) // non-escaping
	}
	if tv != nil {
		vl = b.addr(fn, s.Value, false) // non-escaping
	}
	if tk != nil {
		kl.store(fn, k)
	}
	if tv != nil {
		vl.store(fn, v)
	}

	// A labelled range statement makes its label's break/continue
	// resolve to this loop's done/header blocks.
	if label != nil {
		label._break = done
		label._continue = loop
	}

	// Push break/continue targets for the duration of the body.
	fn.targets = &targets{
		tail:      fn.targets,
		_break:    done,
		_continue: loop,
	}
	b.stmt(fn, s.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, loop) // back-edge
	fn.currentBlock = done
}
+
// stmt lowers statement s to SSA form, emitting code to fn.
func (b *builder) stmt(fn *Function, _s ast.Stmt) {
	// The label of the current statement. If non-nil, its _goto
	// target is always set; its _break and _continue are set only
	// within the body of switch/typeswitch/select/for/range.
	// It is effectively an additional default-nil parameter of stmt().
	var label *lblock
start:
	switch s := _s.(type) {
	case *ast.EmptyStmt:
		// ignore. (Usually removed by gofmt.)

	case *ast.DeclStmt: // Con, Var or Typ
		// Only local var declarations generate code; consts and
		// types are handled by the type checker.
		d := s.Decl.(*ast.GenDecl)
		if d.Tok == token.VAR {
			for _, spec := range d.Specs {
				if vs, ok := spec.(*ast.ValueSpec); ok {
					b.localValueSpec(fn, vs)
				}
			}
		}

	case *ast.LabeledStmt:
		label = fn.labelledBlock(s.Label)
		emitJump(fn, label._goto)
		fn.currentBlock = label._goto
		_s = s.Stmt
		goto start // effectively: tailcall stmt(fn, s.Stmt, label)

	case *ast.ExprStmt:
		b.expr(fn, s.X)

	case *ast.SendStmt:
		// The sent value is converted to the channel's element type.
		fn.emit(&Send{
			Chan: b.expr(fn, s.Chan),
			X: emitConv(fn, b.expr(fn, s.Value),
				fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()),
			pos: s.Arrow,
		})

	case *ast.IncDecStmt:
		// x++ / x-- lowers to x += 1 / x -= 1.
		op := token.ADD
		if s.Tok == token.DEC {
			op = token.SUB
		}
		loc := b.addr(fn, s.X, false)
		b.assignOp(fn, loc, NewConst(exact.MakeInt64(1), loc.typ()), op, s.Pos())

	case *ast.AssignStmt:
		switch s.Tok {
		case token.ASSIGN, token.DEFINE:
			b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE)

		default: // +=, etc.
			// Token arithmetic: maps e.g. ADD_ASSIGN to ADD,
			// relying on the two token ranges being parallel.
			op := s.Tok + token.ADD - token.ADD_ASSIGN
			b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos())
		}

	case *ast.GoStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Go{pos: s.Go}
		b.setCall(fn, s.Call, &v.Call)
		fn.emit(&v)

	case *ast.DeferStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Defer{pos: s.Defer}
		b.setCall(fn, s.Call, &v.Call)
		fn.emit(&v)

		// A deferred call can cause recovery from panic,
		// and control resumes at the Recover block.
		createRecoverBlock(fn)

	case *ast.ReturnStmt:
		var results []Value
		if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
			// Return of one expression in a multi-valued function.
			tuple := b.exprN(fn, s.Results[0])
			ttuple := tuple.Type().(*types.Tuple)
			for i, n := 0, ttuple.Len(); i < n; i++ {
				results = append(results,
					emitConv(fn, emitExtract(fn, tuple, i),
						fn.Signature.Results().At(i).Type()))
			}
		} else {
			// 1:1 return, or no-arg return in non-void function.
			for i, r := range s.Results {
				v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type())
				results = append(results, v)
			}
		}
		if fn.namedResults != nil {
			// Function has named result parameters (NRPs).
			// Perform parallel assignment of return operands to NRPs.
			for i, r := range results {
				emitStore(fn, fn.namedResults[i], r, s.Return)
			}
		}
		// Run function calls deferred in this
		// function when explicitly returning from it.
		fn.emit(new(RunDefers))
		if fn.namedResults != nil {
			// Reload NRPs to form the result tuple.
			// (A deferred call may have mutated them.)
			results = results[:0]
			for _, r := range fn.namedResults {
				results = append(results, emitLoad(fn, r))
			}
		}
		fn.emit(&Return{Results: results, pos: s.Return})
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BranchStmt:
		var block *BasicBlock
		switch s.Tok {
		case token.BREAK:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._break
			} else {
				// Unlabelled break: nearest enclosing target
				// with a break block.
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._break
				}
			}

		case token.CONTINUE:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._continue
			} else {
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._continue
				}
			}

		case token.FALLTHROUGH:
			for t := fn.targets; t != nil && block == nil; t = t.tail {
				block = t._fallthrough
			}

		case token.GOTO:
			block = fn.labelledBlock(s.Label)._goto
		}
		emitJump(fn, block)
		// Code after a branch is unreachable; give it a scratch block.
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BlockStmt:
		b.stmtList(fn, s.List)

	case *ast.IfStmt:
		if s.Init != nil {
			b.stmt(fn, s.Init)
		}
		then := fn.newBasicBlock("if.then")
		done := fn.newBasicBlock("if.done")
		els := done // no else clause: false branch falls through to done
		if s.Else != nil {
			els = fn.newBasicBlock("if.else")
		}
		b.cond(fn, s.Cond, then, els)
		fn.currentBlock = then
		b.stmt(fn, s.Body)
		emitJump(fn, done)

		if s.Else != nil {
			fn.currentBlock = els
			b.stmt(fn, s.Else)
			emitJump(fn, done)
		}

		fn.currentBlock = done

	case *ast.SwitchStmt:
		b.switchStmt(fn, s, label)

	case *ast.TypeSwitchStmt:
		b.typeSwitchStmt(fn, s, label)

	case *ast.SelectStmt:
		b.selectStmt(fn, s, label)

	case *ast.ForStmt:
		b.forStmt(fn, s, label)

	case *ast.RangeStmt:
		b.rangeStmt(fn, s, label)

	default:
		panic(fmt.Sprintf("unexpected statement kind: %T", s))
	}
}
+
// buildFunction builds SSA code for the body of function fn. Idempotent.
//
// Functions without syntax (synthetic, or loaded from object files)
// get their parameter list populated but no body; for source functions
// an implicit no-arg return is appended if control can fall off the
// end of the body.
func (b *builder) buildFunction(fn *Function) {
	if fn.Blocks != nil {
		return // building already started
	}

	var recvField *ast.FieldList
	var body *ast.BlockStmt
	var functype *ast.FuncType
	switch n := fn.syntax.(type) {
	case nil:
		return // not a Go source function. (Synthetic, or from object file.)
	case *ast.FuncDecl:
		functype = n.Type
		recvField = n.Recv
		body = n.Body
	case *ast.FuncLit:
		functype = n.Type
		body = n.Body
	default:
		panic(n)
	}

	if body == nil {
		// External function.
		if fn.Params == nil {
			// This condition ensures we add a non-empty
			// params list once only, but we may attempt
			// the degenerate empty case repeatedly.
			// TODO(adonovan): opt: don't do that.

			// We set Function.Params even though there is no body
			// code to reference them. This simplifies clients.
			if recv := fn.Signature.Recv(); recv != nil {
				fn.addParamObj(recv)
			}
			params := fn.Signature.Params()
			for i, n := 0, params.Len(); i < n; i++ {
				fn.addParamObj(params.At(i))
			}
		}
		return
	}
	if fn.Prog.mode&LogSource != 0 {
		defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
	}
	fn.startBody()
	fn.createSyntacticParams(recvField, functype)
	b.stmt(fn, body)
	if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
		// Control fell off the end of the function's body block.
		//
		// Block optimizations eliminate the current block, if
		// unreachable. It is a builder invariant that
		// if this no-arg return is ill-typed for
		// fn.Signature.Results, this block must be
		// unreachable. The sanity checker checks this.
		fn.emit(new(RunDefers))
		fn.emit(new(Return))
	}
	fn.finishBody()
}
+
+// buildFuncDecl builds SSA code for the function or method declared
+// by decl in package pkg.
+//
+func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
+ id := decl.Name
+ if isBlankIdent(id) {
+ return // discard
+ }
+ fn := pkg.values[pkg.info.Defs[id]].(*Function)
+ if decl.Recv == nil && id.Name == "init" {
+ var v Call
+ v.Call.Value = fn
+ v.setType(types.NewTuple())
+ pkg.init.emit(&v)
+ }
+ b.buildFunction(fn)
+}
+
+// Build calls Package.Build for each package in prog.
+// Building occurs in parallel unless the BuildSerially mode flag was set.
+//
+// Build is intended for whole-program analysis; a typical compiler
+// need only build a single package.
+//
+// Build is idempotent and thread-safe.
+//
+func (prog *Program) Build() {
+ var wg sync.WaitGroup
+ for _, p := range prog.packages {
+ if prog.mode&BuildSerially != 0 {
+ p.Build()
+ } else {
+ wg.Add(1)
+ go func(p *Package) {
+ p.Build()
+ wg.Done()
+ }(p)
+ }
+ }
+ wg.Wait()
+}
+
// Build builds SSA code for all functions and vars in package p.
//
// Precondition: CreatePackage must have been called for all of p's
// direct imports (and hence its direct imports must have been
// error-free).
//
// Build is idempotent and thread-safe.
//
// buildOnce guarantees that build runs at most once even under
// concurrent calls (see Program.Build's parallel mode).
func (p *Package) Build() { p.buildOnce.Do(p.build) }
+
// build constructs SSA code for package p: the synthetic init
// function (guard, prerequisite inits, global var initializers) and
// every declared function and method. It runs at most once, via
// p.buildOnce (see Build).
func (p *Package) build() {
	if p.info == nil {
		return // synthetic package, e.g. "testmain"
	}
	if p.files == nil {
		p.info = nil
		return // package loaded from export data
	}

	// Ensure we have runtime type info for all exported members.
	// TODO(adonovan): ideally belongs in memberFromObject, but
	// that would require package creation in topological order.
	for name, mem := range p.Members {
		if ast.IsExported(name) {
			p.Prog.needMethodsOf(mem.Type())
		}
	}
	if p.Prog.mode&LogSource != 0 {
		defer logStack("build %s", p)()
	}
	init := p.init
	init.startBody()

	var done *BasicBlock

	if p.Prog.mode&BareInits == 0 {
		// Make init() skip if package is already initialized.
		initguard := p.Var("init$guard")
		doinit := init.newBasicBlock("init.start")
		done = init.newBasicBlock("init.done")
		emitIf(init, emitLoad(init, initguard), done, doinit)
		init.currentBlock = doinit
		emitStore(init, initguard, vTrue, token.NoPos)

		// Call the init() function of each package we import.
		for _, pkg := range p.Pkg.Imports() {
			prereq := p.Prog.packages[pkg]
			if prereq == nil {
				panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
			}
			var v Call
			v.Call.Value = prereq.init
			v.Call.pos = init.pos
			v.setType(types.NewTuple())
			init.emit(&v)
		}
	}

	var b builder

	// Initialize package-level vars in correct order.
	for _, varinit := range p.info.InitOrder {
		if init.Prog.mode&LogSource != 0 {
			fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
				varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
		}
		if len(varinit.Lhs) == 1 {
			// 1:1 initialization: var x, y = a(), b()
			var lval lvalue
			if v := varinit.Lhs[0]; v.Name() != "_" {
				lval = &address{addr: p.values[v].(*Global), pos: v.Pos()}
			} else {
				lval = blank{}
			}
			b.assign(init, lval, varinit.Rhs, true, nil)
		} else {
			// n:1 initialization: var x, y := f()
			tuple := b.exprN(init, varinit.Rhs)
			for i, v := range varinit.Lhs {
				if v.Name() == "_" {
					continue
				}
				emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos())
			}
		}
	}

	// Build all package-level functions, init functions
	// and methods, including unreachable/blank ones.
	// We build them in source order, but it's not significant.
	for _, file := range p.files {
		for _, decl := range file.Decls {
			if decl, ok := decl.(*ast.FuncDecl); ok {
				b.buildFuncDecl(p, decl)
			}
		}
	}

	// Finish up init().
	if p.Prog.mode&BareInits == 0 {
		emitJump(init, done)
		init.currentBlock = done
	}
	init.emit(new(Return))
	init.finishBody()

	p.info = nil // We no longer need ASTs or go/types deductions.

	if p.Prog.mode&SanityCheckFunctions != 0 {
		sanityCheckPackage(p)
	}
}
+
+// Like ObjectOf, but panics instead of returning nil.
+// Only valid during p's create and build phases.
+func (p *Package) objectOf(id *ast.Ident) types.Object {
+ if o := p.info.ObjectOf(id); o != nil {
+ return o
+ }
+ panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
+ id.Name, p.Prog.Fset.Position(id.Pos())))
+}
+
+// Like TypeOf, but panics instead of returning nil.
+// Only valid during p's create and build phases.
+func (p *Package) typeOf(e ast.Expr) types.Type {
+ if T := p.info.TypeOf(e); T != nil {
+ return T
+ }
+ panic(fmt.Sprintf("no type for %T @ %s",
+ e, p.Prog.Fset.Position(e.Pos())))
+}
diff --git a/vendor/honnef.co/go/tools/ssa/const.go b/vendor/honnef.co/go/tools/ssa/const.go
new file mode 100644
index 0000000..3606e0f
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/const.go
@@ -0,0 +1,171 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.6
+
+package ssa
+
+// This file defines the Const SSA value type.
+
import (
	"fmt"
	exact "go/constant"
	"go/token"
	"go/types"
	"strconv"
	"unicode/utf8"
)
+
// NewConst returns a new constant of the specified value and type.
// val must be valid according to the specification of Const.Value.
//
func NewConst(val exact.Value, typ types.Type) *Const {
	return &Const{typ, val}
}

// intConst returns an 'int' constant that evaluates to i.
// (i is an int64 in case the host is narrower than the target.)
func intConst(i int64) *Const {
	return NewConst(exact.MakeInt64(i), tInt)
}

// nilConst returns a nil constant of the specified type, which may
// be any reference type, including interfaces.
// (A nil Value field denotes nil; see Const.IsNil.)
//
func nilConst(typ types.Type) *Const {
	return NewConst(nil, typ)
}

// stringConst returns a 'string' constant that evaluates to s.
func stringConst(s string) *Const {
	return NewConst(exact.MakeString(s), tString)
}
+
+// zeroConst returns a new "zero" constant of the specified type,
+// which must not be an array or struct type: the zero values of
+// aggregates are well-defined but cannot be represented by Const.
+//
+func zeroConst(t types.Type) *Const {
+ switch t := t.(type) {
+ case *types.Basic:
+ switch {
+ case t.Info()&types.IsBoolean != 0:
+ return NewConst(exact.MakeBool(false), t)
+ case t.Info()&types.IsNumeric != 0:
+ return NewConst(exact.MakeInt64(0), t)
+ case t.Info()&types.IsString != 0:
+ return NewConst(exact.MakeString(""), t)
+ case t.Kind() == types.UnsafePointer:
+ fallthrough
+ case t.Kind() == types.UntypedNil:
+ return nilConst(t)
+ default:
+ panic(fmt.Sprint("zeroConst for unexpected type:", t))
+ }
+ case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
+ return nilConst(t)
+ case *types.Named:
+ return NewConst(zeroConst(t.Underlying()).Value, t)
+ case *types.Array, *types.Struct, *types.Tuple:
+ panic(fmt.Sprint("zeroConst applied to aggregate:", t))
+ }
+ panic(fmt.Sprint("zeroConst: unexpected ", t))
+}
+
+func (c *Const) RelString(from *types.Package) string {
+ var s string
+ if c.Value == nil {
+ s = "nil"
+ } else if c.Value.Kind() == exact.String {
+ s = exact.StringVal(c.Value)
+ const max = 20
+ // TODO(adonovan): don't cut a rune in half.
+ if len(s) > max {
+ s = s[:max-3] + "..." // abbreviate
+ }
+ s = strconv.Quote(s)
+ } else {
+ s = c.Value.String()
+ }
+ return s + ":" + relType(c.Type(), from)
+}
+
// Name returns the full string representation of c (see RelString).
func (c *Const) Name() string {
	return c.RelString(nil)
}

// String is equivalent to Name.
func (c *Const) String() string {
	return c.Name()
}

// Type returns the type of this constant.
func (c *Const) Type() types.Type {
	return c.typ
}

// Referrers returns nil: constants are not instructions and have no
// tracked referrers.
func (c *Const) Referrers() *[]Instruction {
	return nil
}

// Parent returns nil: a constant belongs to no single function.
func (c *Const) Parent() *Function { return nil }

// Pos returns token.NoPos: constants carry no position information.
func (c *Const) Pos() token.Pos {
	return token.NoPos
}

// IsNil returns true if this constant represents a typed or untyped nil value.
func (c *Const) IsNil() bool {
	return c.Value == nil
}
+
+// TODO(adonovan): move everything below into honnef.co/go/tools/ssa/interp.
+
+// Int64 returns the numeric value of this constant truncated to fit
+// a signed 64-bit integer.
+//
+func (c *Const) Int64() int64 {
+ switch x := exact.ToInt(c.Value); x.Kind() {
+ case exact.Int:
+ if i, ok := exact.Int64Val(x); ok {
+ return i
+ }
+ return 0
+ case exact.Float:
+ f, _ := exact.Float64Val(x)
+ return int64(f)
+ }
+ panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
+}
+
+// Uint64 returns the numeric value of this constant truncated to fit
+// an unsigned 64-bit integer.
+//
+func (c *Const) Uint64() uint64 {
+ switch x := exact.ToInt(c.Value); x.Kind() {
+ case exact.Int:
+ if u, ok := exact.Uint64Val(x); ok {
+ return u
+ }
+ return 0
+ case exact.Float:
+ f, _ := exact.Float64Val(x)
+ return uint64(f)
+ }
+ panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
+}
+
// Float64 returns the numeric value of this constant truncated to fit
// a float64.
//
func (c *Const) Float64() float64 {
	f, _ := exact.Float64Val(c.Value)
	return f
}

// Complex128 returns the complex value of this constant truncated to
// fit a complex128.
//
func (c *Const) Complex128() complex128 {
	// Real and imaginary parts are truncated independently.
	re, _ := exact.Float64Val(exact.Real(c.Value))
	im, _ := exact.Float64Val(exact.Imag(c.Value))
	return complex(re, im)
}
diff --git a/vendor/honnef.co/go/tools/ssa/const15.go b/vendor/honnef.co/go/tools/ssa/const15.go
new file mode 100644
index 0000000..49b5333
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/const15.go
@@ -0,0 +1,171 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5,!go1.6
+
+package ssa
+
+// This file defines the Const SSA value type.
+
+import (
+ "fmt"
+ exact "go/constant"
+ "go/token"
+ "go/types"
+ "strconv"
+)
+
// NewConst returns a new constant of the specified value and type.
// val must be valid according to the specification of Const.Value.
//
func NewConst(val exact.Value, typ types.Type) *Const {
	return &Const{typ, val}
}

// intConst returns an 'int' constant that evaluates to i.
// (i is an int64 in case the host is narrower than the target.)
func intConst(i int64) *Const {
	return NewConst(exact.MakeInt64(i), tInt)
}

// nilConst returns a nil constant of the specified type, which may
// be any reference type, including interfaces.
// (A nil Value field denotes nil; see Const.IsNil.)
//
func nilConst(typ types.Type) *Const {
	return NewConst(nil, typ)
}

// stringConst returns a 'string' constant that evaluates to s.
func stringConst(s string) *Const {
	return NewConst(exact.MakeString(s), tString)
}
+
// zeroConst returns a new "zero" constant of the specified type,
// which must not be an array or struct type: the zero values of
// aggregates are well-defined but cannot be represented by Const.
//
func zeroConst(t types.Type) *Const {
	switch t := t.(type) {
	case *types.Basic:
		switch {
		case t.Info()&types.IsBoolean != 0:
			return NewConst(exact.MakeBool(false), t)
		case t.Info()&types.IsNumeric != 0:
			return NewConst(exact.MakeInt64(0), t)
		case t.Info()&types.IsString != 0:
			return NewConst(exact.MakeString(""), t)
		case t.Kind() == types.UnsafePointer:
			fallthrough
		case t.Kind() == types.UntypedNil:
			// Both kinds share nil as their zero value.
			return nilConst(t)
		default:
			panic(fmt.Sprint("zeroConst for unexpected type:", t))
		}
	case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
		// All reference-like types share nil as their zero value.
		return nilConst(t)
	case *types.Named:
		// Zero of the underlying type, retyped as the named type.
		return NewConst(zeroConst(t.Underlying()).Value, t)
	case *types.Array, *types.Struct, *types.Tuple:
		panic(fmt.Sprint("zeroConst applied to aggregate:", t))
	}
	panic(fmt.Sprint("zeroConst: unexpected ", t))
}
+
// RelString returns the string representation of c, abbreviating
// long string constants and qualifying c's type relative to
// package from.
func (c *Const) RelString(from *types.Package) string {
	var s string
	if c.Value == nil {
		s = "nil"
	} else if c.Value.Kind() == exact.String {
		s = exact.StringVal(c.Value)
		const max = 20
		// TODO(adonovan): don't cut a rune in half.
		if len(s) > max {
			s = s[:max-3] + "..." // abbreviate
		}
		s = strconv.Quote(s)
	} else {
		s = c.Value.String()
	}
	return s + ":" + relType(c.Type(), from)
}
+
// Name returns the full string representation of c (see RelString).
func (c *Const) Name() string {
	return c.RelString(nil)
}

// String is equivalent to Name.
func (c *Const) String() string {
	return c.Name()
}

// Type returns the type of this constant.
func (c *Const) Type() types.Type {
	return c.typ
}

// Referrers returns nil: constants are not instructions and have no
// tracked referrers.
func (c *Const) Referrers() *[]Instruction {
	return nil
}

// Parent returns nil: a constant belongs to no single function.
func (c *Const) Parent() *Function { return nil }

// Pos returns token.NoPos: constants carry no position information.
func (c *Const) Pos() token.Pos {
	return token.NoPos
}

// IsNil returns true if this constant represents a typed or untyped nil value.
func (c *Const) IsNil() bool {
	return c.Value == nil
}
+
+// TODO(adonovan): move everything below into honnef.co/go/tools/ssa/interp.
+
// Int64 returns the numeric value of this constant truncated to fit
// a signed 64-bit integer.
//
// NOTE: unlike the go1.6+ variant (const.go), c.Value is used
// directly rather than via exact.ToInt, which is unavailable in the
// go1.5 go/constant package.
func (c *Const) Int64() int64 {
	switch x := c.Value; x.Kind() {
	case exact.Int:
		if i, ok := exact.Int64Val(x); ok {
			return i
		}
		return 0 // out of range: truncate to 0
	case exact.Float:
		f, _ := exact.Float64Val(x)
		return int64(f)
	}
	panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
}
+
// Uint64 returns the numeric value of this constant truncated to fit
// an unsigned 64-bit integer.
//
// NOTE: unlike the go1.6+ variant (const.go), c.Value is used
// directly rather than via exact.ToInt, which is unavailable in the
// go1.5 go/constant package.
func (c *Const) Uint64() uint64 {
	switch x := c.Value; x.Kind() {
	case exact.Int:
		if u, ok := exact.Uint64Val(x); ok {
			return u
		}
		return 0 // out of range: truncate to 0
	case exact.Float:
		f, _ := exact.Float64Val(x)
		return uint64(f)
	}
	panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
}
+
// Float64 returns the numeric value of this constant truncated to fit
// a float64.
//
func (c *Const) Float64() float64 {
	f, _ := exact.Float64Val(c.Value)
	return f
}

// Complex128 returns the complex value of this constant truncated to
// fit a complex128.
//
func (c *Const) Complex128() complex128 {
	// Real and imaginary parts are truncated independently.
	re, _ := exact.Float64Val(exact.Real(c.Value))
	im, _ := exact.Float64Val(exact.Imag(c.Value))
	return complex(re, im)
}
diff --git a/vendor/honnef.co/go/tools/ssa/create.go b/vendor/honnef.co/go/tools/ssa/create.go
new file mode 100644
index 0000000..69ac12b
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/create.go
@@ -0,0 +1,263 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file implements the CREATE phase of SSA construction.
+// See builder.go for explanation.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "os"
+ "sync"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
// NewProgram returns a new SSA Program.
//
// mode controls diagnostics and checking during SSA construction.
//
func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
	prog := &Program{
		Fset:     fset,
		imported: make(map[string]*Package),
		packages: make(map[*types.Package]*Package),
		thunks:   make(map[selectionKey]*Function),
		bounds:   make(map[*types.Func]*Function),
		mode:     mode,
	}

	// Both canonicalizing maps share one type hasher.
	h := typeutil.MakeHasher() // protected by methodsMu, in effect
	prog.methodSets.SetHasher(h)
	prog.canon.SetHasher(h)

	return prog
}
+
// memberFromObject populates package pkg with a member for the
// typechecker object obj.
//
// For objects from Go source code, syntax is the associated syntax
// tree (for funcs and vars only); it will be used during the build
// phase.
//
// Panics on object kinds that cannot be package members (including
// builtins outside package unsafe).
func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
	name := obj.Name()
	switch obj := obj.(type) {
	case *types.Builtin:
		if pkg.Pkg != types.Unsafe {
			panic("unexpected builtin object: " + obj.String())
		}

	case *types.TypeName:
		pkg.Members[name] = &Type{
			object: obj,
			pkg:    pkg,
		}

	case *types.Const:
		c := &NamedConst{
			object: obj,
			Value:  NewConst(obj.Val(), obj.Type()),
			pkg:    pkg,
		}
		pkg.values[obj] = c.Value
		pkg.Members[name] = c

	case *types.Var:
		g := &Global{
			Pkg:    pkg,
			name:   name,
			object: obj,
			typ:    types.NewPointer(obj.Type()), // address
			pos:    obj.Pos(),
		}
		pkg.values[obj] = g
		pkg.Members[name] = g

	case *types.Func:
		sig := obj.Type().(*types.Signature)
		// Multiple init functions are permitted; disambiguate
		// their member names as init#1, init#2, ...
		if sig.Recv() == nil && name == "init" {
			pkg.ninit++
			name = fmt.Sprintf("init#%d", pkg.ninit)
		}
		fn := &Function{
			name:      name,
			object:    obj,
			Signature: sig,
			syntax:    syntax,
			pos:       obj.Pos(),
			Pkg:       pkg,
			Prog:      pkg.Prog,
		}
		if syntax == nil {
			fn.Synthetic = "loaded from gc object file"
		}

		pkg.values[obj] = fn
		if sig.Recv() == nil {
			pkg.Members[name] = fn // package-level function
		}

	default: // (incl. *types.Package)
		panic("unexpected Object type: " + obj.String())
	}
}
+
// membersFromDecl populates package pkg with members for each
// typechecker object (var, func, const or type) associated with the
// specified decl. Blank ("_") names are skipped throughout.
//
func membersFromDecl(pkg *Package, decl ast.Decl) {
	switch decl := decl.(type) {
	case *ast.GenDecl: // import, const, type or var
		switch decl.Tok {
		case token.CONST:
			for _, spec := range decl.Specs {
				for _, id := range spec.(*ast.ValueSpec).Names {
					if !isBlankIdent(id) {
						memberFromObject(pkg, pkg.info.Defs[id], nil)
					}
				}
			}

		case token.VAR:
			for _, spec := range decl.Specs {
				for _, id := range spec.(*ast.ValueSpec).Names {
					if !isBlankIdent(id) {
						// Vars keep their spec for the build phase.
						memberFromObject(pkg, pkg.info.Defs[id], spec)
					}
				}
			}

		case token.TYPE:
			for _, spec := range decl.Specs {
				id := spec.(*ast.TypeSpec).Name
				if !isBlankIdent(id) {
					memberFromObject(pkg, pkg.info.Defs[id], nil)
				}
			}
		}

	case *ast.FuncDecl:
		id := decl.Name
		if !isBlankIdent(id) {
			// Funcs keep their decl for the build phase.
			memberFromObject(pkg, pkg.info.Defs[id], decl)
		}
	}
}
+
// CreatePackage constructs and returns an SSA Package from the
// specified type-checked, error-free file ASTs, and populates its
// Members mapping.
//
// importable determines whether this package should be returned by a
// subsequent call to ImportedPackage(pkg.Path()).
//
// The real work of building SSA form for each function is not done
// until a subsequent call to Package.Build().
//
func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package {
	p := &Package{
		Prog:    prog,
		Members: make(map[string]Member),
		values:  make(map[types.Object]Value),
		Pkg:     pkg,
		info:    info,  // transient (CREATE and BUILD phases)
		files:   files, // transient (CREATE and BUILD phases)
	}

	// Add init() function.
	p.init = &Function{
		name:      "init",
		Signature: new(types.Signature),
		Synthetic: "package initializer",
		Pkg:       p,
		Prog:      prog,
	}
	p.Members[p.init.name] = p.init

	// CREATE phase.
	// Allocate all package members: vars, funcs, consts and types.
	if len(files) > 0 {
		// Go source package.
		for _, file := range files {
			for _, decl := range file.Decls {
				membersFromDecl(p, decl)
			}
		}
	} else {
		// GC-compiled binary package (or "unsafe")
		// No code.
		// No position information.
		scope := p.Pkg.Scope()
		for _, name := range scope.Names() {
			obj := scope.Lookup(name)
			memberFromObject(p, obj, nil)
			// Also create members for the methods of each
			// named type exported by the package.
			if obj, ok := obj.(*types.TypeName); ok {
				if named, ok := obj.Type().(*types.Named); ok {
					for i, n := 0, named.NumMethods(); i < n; i++ {
						memberFromObject(p, named.Method(i), nil)
					}
				}
			}
		}
	}

	if prog.mode&BareInits == 0 {
		// Add initializer guard variable.
		initguard := &Global{
			Pkg:  p,
			name: "init$guard",
			typ:  types.NewPointer(tBool),
		}
		p.Members[initguard.Name()] = initguard
	}

	if prog.mode&GlobalDebug != 0 {
		p.SetDebugMode(true)
	}

	if prog.mode&PrintPackages != 0 {
		printMu.Lock()
		p.WriteTo(os.Stdout)
		printMu.Unlock()
	}

	if importable {
		prog.imported[p.Pkg.Path()] = p
	}
	prog.packages[p.Pkg] = p

	return p
}
+
// printMu serializes printing of Packages/Functions to stdout
// (used when the PrintPackages mode flag is set).
var printMu sync.Mutex
+
+// AllPackages returns a new slice containing all packages in the
+// program prog in unspecified order.
+//
+func (prog *Program) AllPackages() []*Package {
+ pkgs := make([]*Package, 0, len(prog.packages))
+ for _, pkg := range prog.packages {
+ pkgs = append(pkgs, pkg)
+ }
+ return pkgs
+}
+
// ImportedPackage returns the importable SSA Package whose import
// path is path, or nil if no such SSA package has been created.
//
// Not all packages are importable. For example, no import
// declaration can resolve to the x_test package created by 'go test'
// or the ad-hoc main package created 'go build foo.go'.
//
func (prog *Program) ImportedPackage(path string) *Package {
	// Only packages registered with importable=true in
	// CreatePackage appear in this map.
	return prog.imported[path]
}
diff --git a/vendor/honnef.co/go/tools/ssa/doc.go b/vendor/honnef.co/go/tools/ssa/doc.go
new file mode 100644
index 0000000..57474dd
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/doc.go
@@ -0,0 +1,123 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package ssa defines a representation of the elements of Go programs
+// (packages, types, functions, variables and constants) using a
+// static single-assignment (SSA) form intermediate representation
+// (IR) for the bodies of functions.
+//
+// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE.
+//
+// For an introduction to SSA form, see
+// http://en.wikipedia.org/wiki/Static_single_assignment_form.
+// This page provides a broader reading list:
+// http://www.dcs.gla.ac.uk/~jsinger/ssa.html.
+//
+// The level of abstraction of the SSA form is intentionally close to
+// the source language to facilitate construction of source analysis
+// tools. It is not intended for machine code generation.
+//
+// All looping, branching and switching constructs are replaced with
+// unstructured control flow. Higher-level control flow constructs
+// such as multi-way branch can be reconstructed as needed; see
+// ssautil.Switches() for an example.
+//
+// To construct an SSA-form program, call ssautil.CreateProgram on a
+// loader.Program, a set of type-checked packages created from
+// parsed Go source files. The resulting ssa.Program contains all the
+// packages and their members, but SSA code is not created for
+// function bodies until a subsequent call to (*Package).Build.
+//
+// The builder initially builds a naive SSA form in which all local
+// variables are addresses of stack locations with explicit loads and
+// stores. Registerisation of eligible locals and φ-node insertion
+// using dominance and dataflow are then performed as a second pass
+// called "lifting" to improve the accuracy and performance of
+// subsequent analyses; this pass can be skipped by setting the
+// NaiveForm builder flag.
+//
+// The primary interfaces of this package are:
+//
+// - Member: a named member of a Go package.
+// - Value: an expression that yields a value.
+// - Instruction: a statement that consumes values and performs computation.
+// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph)
+//
+// A computation that yields a result implements both the Value and
+// Instruction interfaces. The following table shows for each
+// concrete type which of these interfaces it implements.
+//
+// Value? Instruction? Member?
+// *Alloc ✔ ✔
+// *BinOp ✔ ✔
+// *Builtin ✔
+// *Call ✔ ✔
+// *ChangeInterface ✔ ✔
+// *ChangeType ✔ ✔
+// *Const ✔
+// *Convert ✔ ✔
+// *DebugRef ✔
+// *Defer ✔
+// *Extract ✔ ✔
+// *Field ✔ ✔
+// *FieldAddr ✔ ✔
+// *FreeVar ✔
+// *Function ✔ ✔ (func)
+// *Global ✔ ✔ (var)
+// *Go ✔
+// *If ✔
+// *Index ✔ ✔
+// *IndexAddr ✔ ✔
+// *Jump ✔
+// *Lookup ✔ ✔
+// *MakeChan ✔ ✔
+// *MakeClosure ✔ ✔
+// *MakeInterface ✔ ✔
+// *MakeMap ✔ ✔
+// *MakeSlice ✔ ✔
+// *MapUpdate ✔
+// *NamedConst ✔ (const)
+// *Next ✔ ✔
+// *Panic ✔
+// *Parameter ✔
+// *Phi ✔ ✔
+// *Range ✔ ✔
+// *Return ✔
+// *RunDefers ✔
+// *Select ✔ ✔
+// *Send ✔
+// *Slice ✔ ✔
+// *Store ✔
+// *Type ✔ (type)
+// *TypeAssert ✔ ✔
+// *UnOp ✔ ✔
+//
+// Other key types in this package include: Program, Package, Function
+// and BasicBlock.
+//
+// The program representation constructed by this package is fully
+// resolved internally, i.e. it does not rely on the names of Values,
+// Packages, Functions, Types or BasicBlocks for the correct
+// interpretation of the program. Only the identities of objects and
+// the topology of the SSA and type graphs are semantically
+// significant. (There is one exception: Ids, used to identify field
+// and method names, contain strings.) Avoidance of name-based
+// operations simplifies the implementation of subsequent passes and
+// can make them very efficient. Many objects are nonetheless named
+// to aid in debugging, but it is not essential that the names be
+// either accurate or unambiguous. The public API exposes a number of
+// name-based maps for client convenience.
+//
+// The ssa/ssautil package provides various utilities that depend only
+// on the public API of this package.
+//
+// TODO(adonovan): Consider the exceptional control-flow implications
+// of defer and recover().
+//
+// TODO(adonovan): write a how-to document for all the various cases
+// of trying to determine corresponding elements across the four
+// domains of source locations, ast.Nodes, types.Objects,
+// ssa.Values/Instructions.
+//
+package ssa // import "honnef.co/go/tools/ssa"
diff --git a/vendor/honnef.co/go/tools/ssa/dom.go b/vendor/honnef.co/go/tools/ssa/dom.go
new file mode 100644
index 0000000..12ef430
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/dom.go
@@ -0,0 +1,341 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines algorithms related to dominance.
+
+// Dominator tree construction ----------------------------------------
+//
+// We use the algorithm described in Lengauer & Tarjan. 1979. A fast
+// algorithm for finding dominators in a flowgraph.
+// http://doi.acm.org/10.1145/357062.357071
+//
+// We also apply the optimizations to SLT described in Georgiadis et
+// al, Finding Dominators in Practice, JGAA 2006,
+// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf
+// to avoid the need for buckets of size > 1.
+
+import (
+ "bytes"
+ "fmt"
+ "math/big"
+ "os"
+ "sort"
+)
+
+// Idom returns the block that immediately dominates b:
+// its parent in the dominator tree, if any.
+// Neither the entry node (b.Index==0) nor the recover node
+// (b==b.Parent().Recover()) has a parent.
+//
+func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom }
+
+// Dominees returns the list of blocks that b immediately dominates:
+// its children in the dominator tree.
+//
+// The result aliases b's internal slice; callers must not modify it.
+//
+func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children }
+
+// Dominates reports whether b dominates c.
+//
+// It runs in constant time by testing whether c's [pre, post]
+// interval nests within b's (see numberDomTree).
+func (b *BasicBlock) Dominates(c *BasicBlock) bool {
+ return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post
+}
+
+// byDomPreorder orders blocks by their dominator-tree preorder
+// number (dom.pre); it implements sort.Interface.
+type byDomPreorder []*BasicBlock
+
+func (a byDomPreorder) Len() int { return len(a) }
+func (a byDomPreorder) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre }
+
+// DomPreorder returns a new slice containing the blocks of f in
+// dominator tree preorder.
+//
+func (f *Function) DomPreorder() []*BasicBlock {
+ n := len(f.Blocks)
+ order := make(byDomPreorder, n, n)
+ // Sort a copy so that f.Blocks itself is left untouched.
+ copy(order, f.Blocks)
+ sort.Sort(order)
+ return order
+}
+
+// domInfo contains a BasicBlock's dominance information.
+//
+// pre and post delimit a nesting interval per node, which enables
+// constant-time dominance queries (see Dominates).
+type domInfo struct {
+ idom *BasicBlock // immediate dominator (parent in domtree)
+ children []*BasicBlock // nodes immediately dominated by this one
+ pre, post int32 // pre- and post-order numbering within domtree
+}
+
+// ltState holds the working state for Lengauer-Tarjan algorithm
+// (during which domInfo.pre is repurposed for CFG DFS preorder number).
+//
+// All three slices share a single backing array allocated by
+// buildDomTree.
+type ltState struct {
+ // Each slice is indexed by b.Index.
+ sdom []*BasicBlock // b's semidominator
+ parent []*BasicBlock // b's parent in DFS traversal of CFG
+ ancestor []*BasicBlock // b's ancestor with least sdom
+}
+
+// dfs implements the depth-first search part of the LT algorithm.
+//
+// i is the next preorder number to assign; dfs returns the updated
+// counter after numbering the subtree reachable from v.
+func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 {
+ preorder[i] = v
+ v.dom.pre = i // For now: DFS preorder of spanning tree of CFG
+ i++
+ lt.sdom[v.Index] = v // v is initially its own semidominator; also marks v visited
+ lt.link(nil, v)
+ for _, w := range v.Succs {
+ if lt.sdom[w.Index] == nil { // w not yet visited
+ lt.parent[w.Index] = v
+ i = lt.dfs(w, i, preorder)
+ }
+ }
+ return i
+}
+
+// eval implements the EVAL part of the LT algorithm.
+//
+// It returns the vertex on v's ancestor chain (v itself if it has no
+// ancestor) whose semidominator has the least DFS preorder number.
+func (lt *ltState) eval(v *BasicBlock) *BasicBlock {
+ // TODO(adonovan): opt: do path compression per simple LT.
+ u := v
+ for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] {
+ if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre {
+ u = v
+ }
+ }
+ return u
+}
+
+// link implements the LINK part of the LT algorithm:
+// it makes v the forest ancestor of w (a nil v detaches w).
+func (lt *ltState) link(v, w *BasicBlock) {
+ lt.ancestor[w.Index] = v
+}
+
+// buildDomTree computes the dominator tree of f using the LT algorithm.
+// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run).
+//
+func buildDomTree(f *Function) {
+ // The step numbers refer to the original LT paper; the
+ // reordering is due to Georgiadis.
+
+ // Clear any previous domInfo.
+ for _, b := range f.Blocks {
+ b.dom = domInfo{}
+ }
+
+ n := len(f.Blocks)
+ // Allocate space for 5 contiguous [n]*BasicBlock arrays:
+ // sdom, parent, ancestor, preorder, buckets.
+ space := make([]*BasicBlock, 5*n, 5*n)
+ lt := ltState{
+ sdom: space[0:n],
+ parent: space[n : 2*n],
+ ancestor: space[2*n : 3*n],
+ }
+
+ // Step 1. Number vertices by depth-first preorder.
+ preorder := space[3*n : 4*n]
+ root := f.Blocks[0]
+ prenum := lt.dfs(root, 0, preorder)
+ // The Recover block (if any) is a second DFS root: it is numbered
+ // by continuing the counter from the first traversal.
+ recover := f.Recover
+ if recover != nil {
+ lt.dfs(recover, prenum, preorder)
+ }
+
+ buckets := space[4*n : 5*n]
+ copy(buckets, preorder)
+
+ // In reverse preorder...
+ for i := int32(n) - 1; i > 0; i-- {
+ w := preorder[i]
+
+ // Step 3. Implicitly define the immediate dominator of each node.
+ for v := buckets[i]; v != w; v = buckets[v.dom.pre] {
+ u := lt.eval(v)
+ if lt.sdom[u.Index].dom.pre < i {
+ v.dom.idom = u
+ } else {
+ v.dom.idom = w
+ }
+ }
+
+ // Step 2. Compute the semidominators of all nodes.
+ lt.sdom[w.Index] = lt.parent[w.Index]
+ for _, v := range w.Preds {
+ u := lt.eval(v)
+ if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre {
+ lt.sdom[w.Index] = lt.sdom[u.Index]
+ }
+ }
+
+ lt.link(lt.parent[w.Index], w)
+
+ if lt.parent[w.Index] == lt.sdom[w.Index] {
+ w.dom.idom = lt.parent[w.Index]
+ } else {
+ // Defer w's idom; thread it onto its semidominator's bucket.
+ buckets[i] = buckets[lt.sdom[w.Index].dom.pre]
+ buckets[lt.sdom[w.Index].dom.pre] = w
+ }
+ }
+
+ // The final 'Step 3' is now outside the loop.
+ for v := buckets[0]; v != root; v = buckets[v.dom.pre] {
+ v.dom.idom = root
+ }
+
+ // Step 4. Explicitly define the immediate dominator of each
+ // node, in preorder.
+ for _, w := range preorder[1:] {
+ if w == root || w == recover {
+ w.dom.idom = nil
+ } else {
+ if w.dom.idom != lt.sdom[w.Index] {
+ w.dom.idom = w.dom.idom.dom.idom
+ }
+ // Calculate Children relation as inverse of Idom.
+ w.dom.idom.dom.children = append(w.dom.idom.dom.children, w)
+ }
+ }
+
+ // Renumber dom.pre/dom.post over the dominator tree; until now
+ // dom.pre held the CFG DFS preorder number.
+ pre, post := numberDomTree(root, 0, 0)
+ if recover != nil {
+ numberDomTree(recover, pre, post)
+ }
+
+ // printDomTreeDot(os.Stderr, f) // debugging
+ // printDomTreeText(os.Stderr, root, 0) // debugging
+
+ if f.Prog.mode&SanityCheckFunctions != 0 {
+ sanityCheckDomTree(f)
+ }
+}
+
+// numberDomTree sets the pre- and post-order numbers of a depth-first
+// traversal of the dominator tree rooted at v. These are used to
+// answer dominance queries in constant time.
+//
+// pre and post are the starting counters; the updated counters are
+// returned so a second root (the Recover block) can continue the
+// numbering.
+func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
+ v.dom.pre = pre
+ pre++
+ for _, child := range v.dom.children {
+ pre, post = numberDomTree(child, pre, post)
+ }
+ v.dom.post = post
+ post++
+ return pre, post
+}
+
+// Testing utilities ----------------------------------------
+
+// sanityCheckDomTree checks the correctness of the dominator tree
+// computed by the LT algorithm by comparing against the dominance
+// relation computed by a naive Kildall-style forward dataflow
+// analysis (Algorithm 10.16 from the "Dragon" book).
+//
+// It panics (after printing diagnostics to stderr) on any mismatch.
+func sanityCheckDomTree(f *Function) {
+ n := len(f.Blocks)
+
+ // D[i] is the set of blocks that dominate f.Blocks[i],
+ // represented as a bit-set of block indices.
+ D := make([]big.Int, n)
+
+ one := big.NewInt(1)
+
+ // all is the set of all blocks; constant.
+ var all big.Int
+ all.Set(one).Lsh(&all, uint(n)).Sub(&all, one)
+
+ // Initialization.
+ for i, b := range f.Blocks {
+ if i == 0 || b == f.Recover {
+ // A root is dominated only by itself.
+ // NOTE(review): this copies D[0] with bit 0 set into D[i],
+ // so for the Recover root it records bit 0 (the entry block)
+ // rather than bit i; harmless because the check loop below
+ // skips Recover, but confirm against upstream x/tools.
+ D[i].SetBit(&D[0], 0, 1)
+ } else {
+ // All other blocks are (initially) dominated
+ // by every block.
+ D[i].Set(&all)
+ }
+ }
+
+ // Iteration until fixed point.
+ for changed := true; changed; {
+ changed = false
+ for i, b := range f.Blocks {
+ if i == 0 || b == f.Recover {
+ continue
+ }
+ // Compute intersection across predecessors.
+ var x big.Int
+ x.Set(&all)
+ for _, pred := range b.Preds {
+ x.And(&x, &D[pred.Index])
+ }
+ x.SetBit(&x, i, 1) // a block always dominates itself.
+ if D[i].Cmp(&x) != 0 {
+ D[i].Set(&x)
+ changed = true
+ }
+ }
+ }
+
+ // Check the entire relation. O(n^2).
+ // The Recover block (if any) must be treated specially so we skip it.
+ ok := true
+ for i := 0; i < n; i++ {
+ for j := 0; j < n; j++ {
+ b, c := f.Blocks[i], f.Blocks[j]
+ if c == f.Recover {
+ continue
+ }
+ actual := b.Dominates(c)
+ expected := D[j].Bit(i) == 1
+ if actual != expected {
+ fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected)
+ ok = false
+ }
+ }
+ }
+
+ // Also check that dom.pre is a valid index into DomPreorder's result.
+ preorder := f.DomPreorder()
+ for _, b := range f.Blocks {
+ if got := preorder[b.dom.pre]; got != b {
+ fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b)
+ ok = false
+ }
+ }
+
+ if !ok {
+ panic("sanityCheckDomTree failed for " + f.String())
+ }
+
+}
+
+// Printing functions ----------------------------------------
+
+// printDomTreeText prints the dominator tree as text, using indentation.
+// (Debugging aid; see the commented-out call in buildDomTree.)
+func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) {
+ fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v)
+ for _, child := range v.dom.children {
+ printDomTreeText(buf, child, indent+1)
+ }
+}
+
+// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
+// (.dot) format.
+// Nodes are identified by their dominator-tree preorder number (dom.pre).
+func printDomTreeDot(buf *bytes.Buffer, f *Function) {
+ fmt.Fprintln(buf, "//", f)
+ fmt.Fprintln(buf, "digraph domtree {")
+ for i, b := range f.Blocks {
+ v := b.dom
+ fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post)
+ // TODO(adonovan): improve appearance of edges
+ // belonging to both dominator tree and CFG.
+
+ // Dominator tree edge.
+ if i != 0 {
+ fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre)
+ }
+ // CFG edges.
+ for _, pred := range b.Preds {
+ fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre)
+ }
+ }
+ fmt.Fprintln(buf, "}")
+}
diff --git a/vendor/honnef.co/go/tools/ssa/emit.go b/vendor/honnef.co/go/tools/ssa/emit.go
new file mode 100644
index 0000000..400da21
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/emit.go
@@ -0,0 +1,475 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// Helpers for emitting SSA instructions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+)
+
+// emitNew emits to f a new (heap Alloc) instruction allocating an
+// object of type typ. pos is the optional source location.
+//
+func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc {
+ v := &Alloc{Heap: true}
+ // The Alloc's value is the address of the new object, i.e. *typ.
+ v.setType(types.NewPointer(typ))
+ v.setPos(pos)
+ f.emit(v)
+ return v
+}
+
+// emitLoad emits to f an instruction to load the address addr into a
+// new temporary, and returns the value so defined.
+//
+func emitLoad(f *Function, addr Value) *UnOp {
+ v := &UnOp{Op: token.MUL, X: addr} // unary * (pointer dereference)
+ v.setType(deref(addr.Type()))
+ f.emit(v)
+ return v
+}
+
+// emitDebugRef emits to f a DebugRef pseudo-instruction associating
+// expression e with value v.
+//
+// It is a no-op when debug info is disabled, and for blank
+// identifiers and constant-like objects, which denote no variable.
+func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) {
+ if !f.debugInfo() {
+ return // debugging not enabled
+ }
+ if v == nil || e == nil {
+ panic("nil")
+ }
+ var obj types.Object
+ e = unparen(e)
+ if id, ok := e.(*ast.Ident); ok {
+ if isBlankIdent(id) {
+ return
+ }
+ obj = f.Pkg.objectOf(id)
+ switch obj.(type) {
+ case *types.Nil, *types.Const, *types.Builtin:
+ return
+ }
+ }
+ f.emit(&DebugRef{
+ X: v,
+ Expr: e,
+ IsAddr: isAddr,
+ object: obj,
+ })
+}
+
+// emitArith emits to f code to compute the binary operation op(x, y)
+// where op is an eager shift, logical or arithmetic operation.
+// (Use emitCompare() for comparisons and Builder.logicalBinop() for
+// non-eager operations.)
+//
+// t is the type of the result; both operands are converted as needed.
+func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value {
+ switch op {
+ case token.SHL, token.SHR:
+ x = emitConv(f, x, t)
+ // y may be signed or an 'untyped' constant.
+ // TODO(adonovan): whence signed values?
+ if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 {
+ y = emitConv(f, y, types.Typ[types.Uint64])
+ }
+
+ case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
+ // Both operands take the result type t.
+ x = emitConv(f, x, t)
+ y = emitConv(f, y, t)
+
+ default:
+ panic("illegal op in emitArith: " + op.String())
+
+ }
+ v := &BinOp{
+ Op: op,
+ X: x,
+ Y: y,
+ }
+ v.setPos(pos)
+ v.setType(t)
+ return f.emit(v)
+}
+
+// emitCompare emits to f code to compute the boolean result of the
+// comparison 'x op y'.
+//
+func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
+ xt := x.Type().Underlying()
+ yt := y.Type().Underlying()
+
+ // Special case to optimise a tagless SwitchStmt so that
+ // these are equivalent
+ // switch { case e: ...}
+ // switch true { case e: ... }
+ // if e==true { ... }
+ // even in the case when e's type is an interface.
+ // TODO(adonovan): opt: generalise to x==true, false!=y, etc.
+ if x == vTrue && op == token.EQL {
+ if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 {
+ return y
+ }
+ }
+
+ // Bring both operands to a common type before comparing.
+ if types.Identical(xt, yt) {
+ // no conversion necessary
+ } else if _, ok := xt.(*types.Interface); ok {
+ y = emitConv(f, y, x.Type())
+ } else if _, ok := yt.(*types.Interface); ok {
+ x = emitConv(f, x, y.Type())
+ } else if _, ok := x.(*Const); ok {
+ x = emitConv(f, x, y.Type())
+ } else if _, ok := y.(*Const); ok {
+ y = emitConv(f, y, x.Type())
+ } else {
+ // other cases, e.g. channels. No-op.
+ }
+
+ v := &BinOp{
+ Op: op,
+ X: x,
+ Y: y,
+ }
+ v.setPos(pos)
+ v.setType(tBool)
+ return f.emit(v)
+}
+
+// isValuePreserving returns true if a conversion from ut_src to
+// ut_dst is value-preserving, i.e. just a change of type.
+// Precondition: neither argument is a named type.
+//
+func isValuePreserving(ut_src, ut_dst types.Type) bool {
+ // Identical underlying types?
+ if structTypesIdentical(ut_dst, ut_src) {
+ return true
+ }
+
+ switch ut_dst.(type) {
+ case *types.Chan:
+ // Conversion between channel types?
+ _, ok := ut_src.(*types.Chan)
+ return ok
+
+ case *types.Pointer:
+ // Conversion between pointers with identical base types?
+ // NOTE(review): only the kind is checked here; base-type
+ // compatibility is presumably guaranteed by the type checker
+ // before this is reached — confirm.
+ _, ok := ut_src.(*types.Pointer)
+ return ok
+ }
+ return false
+}
+
+// emitConv emits to f code to convert Value val to exactly type typ,
+// and returns the converted value. Implicit conversions are required
+// by language assignability rules in assignments, parameter passing,
+// etc. Conversions cannot fail dynamically.
+//
+func emitConv(f *Function, val Value, typ types.Type) Value {
+ t_src := val.Type()
+
+ // Identical types? Conversion is a no-op.
+ if types.Identical(t_src, typ) {
+ return val
+ }
+
+ ut_dst := typ.Underlying()
+ ut_src := t_src.Underlying()
+
+ // Just a change of type, but not value or representation?
+ if isValuePreserving(ut_src, ut_dst) {
+ c := &ChangeType{X: val}
+ c.setType(typ)
+ return f.emit(c)
+ }
+
+ // Conversion to, or construction of a value of, an interface type?
+ if _, ok := ut_dst.(*types.Interface); ok {
+ // Assignment from one interface type to another?
+ if _, ok := ut_src.(*types.Interface); ok {
+ c := &ChangeInterface{X: val}
+ c.setType(typ)
+ return f.emit(c)
+ }
+
+ // Untyped nil constant? Return interface-typed nil constant.
+ if ut_src == tUntypedNil {
+ return nilConst(typ)
+ }
+
+ // Convert (non-nil) "untyped" literals to their default type.
+ if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 {
+ val = emitConv(f, val, DefaultType(ut_src))
+ }
+
+ // NOTE(review): needMethodsOf presumably records the dynamic
+ // type's method set before the value is boxed — confirm in the
+ // package's methods code.
+ f.Pkg.Prog.needMethodsOf(val.Type())
+ mi := &MakeInterface{X: val}
+ mi.setType(typ)
+ return f.emit(mi)
+ }
+
+ // Conversion of a compile-time constant value?
+ if c, ok := val.(*Const); ok {
+ if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() {
+ // Conversion of a compile-time constant to
+ // another constant type results in a new
+ // constant of the destination type and
+ // (initially) the same abstract value.
+ // We don't truncate the value yet.
+ return NewConst(c.Value, typ)
+ }
+
+ // We're converting from constant to non-constant type,
+ // e.g. string -> []byte/[]rune.
+ }
+
+ // A representation-changing conversion?
+ // At least one of {ut_src,ut_dst} must be *Basic.
+ // (The other may be []byte or []rune.)
+ _, ok1 := ut_src.(*types.Basic)
+ _, ok2 := ut_dst.(*types.Basic)
+ if ok1 || ok2 {
+ c := &Convert{X: val}
+ c.setType(typ)
+ return f.emit(c)
+ }
+
+ panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ))
+}
+
+// emitStore emits to f an instruction to store value val at location
+// addr, applying implicit conversions as required by assignability rules.
+//
+func emitStore(f *Function, addr, val Value, pos token.Pos) *Store {
+ // val is implicitly converted to the pointee type of addr.
+ s := &Store{
+ Addr: addr,
+ Val: emitConv(f, val, deref(addr.Type())),
+ pos: pos,
+ }
+ f.emit(s)
+ return s
+}
+
+// emitJump emits to f a jump to target, and updates the control-flow graph.
+// Postcondition: f.currentBlock is nil.
+//
+func emitJump(f *Function, target *BasicBlock) {
+ b := f.currentBlock
+ b.emit(new(Jump))
+ addEdge(b, target)
+ f.currentBlock = nil // the block is now terminated
+}
+
+// emitJump appends a Jump instruction to b and adds a CFG edge from b
+// to target. Unlike the package-level emitJump, it does not modify
+// the function's current block.
+func (b *BasicBlock) emitJump(target *BasicBlock) {
+ b.emit(new(Jump))
+ addEdge(b, target)
+}
+
+// emitIf emits to f a conditional jump to tblock or fblock based on
+// cond, and updates the control-flow graph.
+// Postcondition: f.currentBlock is nil.
+//
+func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) {
+ b := f.currentBlock
+ b.emit(&If{Cond: cond})
+ // NOTE(review): edge order is significant — the true-branch edge is
+ // added first, per the If instruction's successor convention; confirm.
+ addEdge(b, tblock)
+ addEdge(b, fblock)
+ f.currentBlock = nil
+}
+
+// emitExtract emits to f an instruction to extract the index'th
+// component of tuple. It returns the extracted value.
+//
+func emitExtract(f *Function, tuple Value, index int) Value {
+ e := &Extract{Tuple: tuple, Index: index}
+ // The result type is the index'th component type of the tuple.
+ e.setType(tuple.Type().(*types.Tuple).At(index).Type())
+ return f.emit(e)
+}
+
+// emitTypeAssert emits to f a type assertion value := x.(t) and
+// returns the value. x.Type() must be an interface.
+//
+// This is the single-result form; see emitTypeTest for the
+// comma-ok form.
+func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value {
+ a := &TypeAssert{X: x, AssertedType: t}
+ a.setPos(pos)
+ a.setType(t)
+ return f.emit(a)
+}
+
+// emitTypeTest emits to f a type test value,ok := x.(t) and returns
+// a (value, ok) tuple. x.Type() must be an interface.
+//
+func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value {
+ a := &TypeAssert{
+ X: x,
+ AssertedType: t,
+ CommaOk: true,
+ }
+ a.setPos(pos)
+ // The result is a 2-tuple: (value t, ok bool).
+ a.setType(types.NewTuple(
+ newVar("value", t),
+ varOk,
+ ))
+ return f.emit(a)
+}
+
+// emitTailCall emits to f a function call in tail position. The
+// caller is responsible for all fields of 'call' except its type.
+// Intended for wrapper methods.
+// Precondition: f does/will not use deferred procedure calls.
+// Postcondition: f.currentBlock is nil.
+//
+func emitTailCall(f *Function, call *Call) {
+ tresults := f.Signature.Results()
+ nr := tresults.Len()
+ // A single result takes that result's type; multiple results use
+ // the tuple type itself.
+ if nr == 1 {
+ call.typ = tresults.At(0).Type()
+ } else {
+ call.typ = tresults
+ }
+ tuple := f.emit(call)
+ var ret Return
+ switch nr {
+ case 0:
+ // no-op
+ case 1:
+ ret.Results = []Value{tuple}
+ default:
+ // Multiple results: unpack the tuple with Extract.
+ for i := 0; i < nr; i++ {
+ v := emitExtract(f, tuple, i)
+ // TODO(adonovan): in principle, this is required:
+ // v = emitConv(f, o.Type, f.Signature.Results[i].Type)
+ // but in practice emitTailCall is only used when
+ // the types exactly match.
+ ret.Results = append(ret.Results, v)
+ }
+ }
+ f.emit(&ret)
+ f.currentBlock = nil
+}
+
+// emitImplicitSelections emits to f code to apply the sequence of
+// implicit field selections specified by indices to base value v, and
+// returns the selected value.
+//
+// If v is the address of a struct, the result will be the address of
+// a field; if it is the value of a struct, the result will be the
+// value of a field.
+//
+// No position information is recorded: implicit selections have no
+// syntax of their own.
+func emitImplicitSelections(f *Function, v Value, indices []int) Value {
+ for _, index := range indices {
+ fld := deref(v.Type()).Underlying().(*types.Struct).Field(index)
+
+ if isPointer(v.Type()) {
+ instr := &FieldAddr{
+ X: v,
+ Field: index,
+ }
+ instr.setType(types.NewPointer(fld.Type()))
+ v = f.emit(instr)
+ // Load the field's value iff indirectly embedded.
+ if isPointer(fld.Type()) {
+ v = emitLoad(f, v)
+ }
+ } else {
+ instr := &Field{
+ X: v,
+ Field: index,
+ }
+ instr.setType(fld.Type())
+ v = f.emit(instr)
+ }
+ }
+ return v
+}
+
+// emitFieldSelection emits to f code to select the index'th field of v.
+//
+// If wantAddr, the input must be a pointer-to-struct and the result
+// will be the field's address; otherwise the result will be the
+// field's value.
+// Ident id is used for position and debug info.
+//
+func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value {
+ fld := deref(v.Type()).Underlying().(*types.Struct).Field(index)
+ if isPointer(v.Type()) {
+ instr := &FieldAddr{
+ X: v,
+ Field: index,
+ }
+ instr.setPos(id.Pos())
+ instr.setType(types.NewPointer(fld.Type()))
+ v = f.emit(instr)
+ // Load the field's value iff we don't want its address.
+ if !wantAddr {
+ v = emitLoad(f, v)
+ }
+ } else {
+ instr := &Field{
+ X: v,
+ Field: index,
+ }
+ instr.setPos(id.Pos())
+ instr.setType(fld.Type())
+ v = f.emit(instr)
+ }
+ // Associate the id expression with the result for debug info.
+ emitDebugRef(f, id, v, wantAddr)
+ return v
+}
+
+// zeroValue emits to f code to produce a zero value of type t,
+// and returns it.
+//
+func zeroValue(f *Function, t types.Type) Value {
+ switch t.Underlying().(type) {
+ case *types.Struct, *types.Array:
+ // Aggregates: load from a newly allocated local
+ // (presumably zero-initialized — see addLocal/Alloc).
+ return emitLoad(f, f.addLocal(t, token.NoPos))
+ default:
+ // Scalars, pointers, etc. have a constant zero value.
+ return zeroConst(t)
+ }
+}
+
+// createRecoverBlock emits to f a block of code to return after a
+// recovered panic, and sets f.Recover to it.
+//
+// If f's result parameters are named, the code loads and returns
+// their current values, otherwise it returns the zero values of their
+// type.
+//
+// Idempotent.
+//
+func createRecoverBlock(f *Function) {
+ if f.Recover != nil {
+ return // already created
+ }
+ // Save and restore the current block around emission so the caller
+ // is unaffected.
+ saved := f.currentBlock
+
+ f.Recover = f.newBasicBlock("recover")
+ f.currentBlock = f.Recover
+
+ var results []Value
+ if f.namedResults != nil {
+ // Reload NRPs to form value tuple.
+ for _, r := range f.namedResults {
+ results = append(results, emitLoad(f, r))
+ }
+ } else {
+ R := f.Signature.Results()
+ for i, n := 0, R.Len(); i < n; i++ {
+ T := R.At(i).Type()
+
+ // Return zero value of each result type.
+ results = append(results, zeroValue(f, T))
+ }
+ }
+ f.emit(&Return{Results: results})
+
+ f.currentBlock = saved
+}
diff --git a/vendor/honnef.co/go/tools/ssa/func.go b/vendor/honnef.co/go/tools/ssa/func.go
new file mode 100644
index 0000000..86a3da7
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/func.go
@@ -0,0 +1,703 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// This file implements the Function and BasicBlock types.
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "io"
+ "os"
+ "strings"
+)
+
+// addEdge adds a control-flow graph edge from from to to.
+func addEdge(from, to *BasicBlock) {
+ from.Succs = append(from.Succs, to)
+ to.Preds = append(to.Preds, from)
+}
+
+// Parent returns the function that contains block b.
+func (b *BasicBlock) Parent() *Function { return b.parent }
+
+// String returns a human-readable label of this block.
+// It is not guaranteed unique within the function.
+//
+func (b *BasicBlock) String() string {
+ return fmt.Sprintf("%d", b.Index)
+}
+
+// emit appends an instruction to the current basic block.
+// If the instruction defines a Value, it is returned.
+//
+func (b *BasicBlock) emit(i Instruction) Value {
+ i.setBlock(b)
+ b.Instrs = append(b.Instrs, i)
+ v, _ := i.(Value)
+ return v
+}
+
+// predIndex returns the i such that b.Preds[i] == c or panics if
+// there is none.
+func (b *BasicBlock) predIndex(c *BasicBlock) int {
+ for i, pred := range b.Preds {
+ if pred == c {
+ return i
+ }
+ }
+ panic(fmt.Sprintf("no edge %s -> %s", c, b))
+}
+
+// hasPhi returns true if b.Instrs contains φ-nodes.
+func (b *BasicBlock) hasPhi() bool {
+ _, ok := b.Instrs[0].(*Phi)
+ return ok
+}
+
// Phis returns the prefix of b.Instrs containing all the block's
// φ-nodes (exported wrapper around phis).
func (b *BasicBlock) Phis() []Instruction {
	return b.phis()
}
+
+// phis returns the prefix of b.Instrs containing all the block's φ-nodes.
+func (b *BasicBlock) phis() []Instruction {
+ for i, instr := range b.Instrs {
+ if _, ok := instr.(*Phi); !ok {
+ return b.Instrs[:i]
+ }
+ }
+ return nil // unreachable in well-formed blocks
+}
+
+// replacePred replaces all occurrences of p in b's predecessor list with q.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) replacePred(p, q *BasicBlock) {
+ for i, pred := range b.Preds {
+ if pred == p {
+ b.Preds[i] = q
+ }
+ }
+}
+
+// replaceSucc replaces all occurrences of p in b's successor list with q.
+// Ordinarily there should be at most one.
+//
+func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
+ for i, succ := range b.Succs {
+ if succ == p {
+ b.Succs[i] = q
+ }
+ }
+}
+
// RemovePred removes all occurrences of p from b's predecessor list
// and φ-nodes (exported wrapper around removePred).
func (b *BasicBlock) RemovePred(p *BasicBlock) {
	b.removePred(p)
}
+
// removePred removes all occurrences of p in b's
// predecessor list and φ-nodes.
// Ordinarily there should be at most one.
//
// Edge i of every φ-node corresponds to b.Preds[i], so both are
// compacted in lockstep using the shared write cursor j.
func (b *BasicBlock) removePred(p *BasicBlock) {
	phis := b.phis()

	// We must preserve edge order for φ-nodes.
	j := 0
	for i, pred := range b.Preds {
		if pred != p {
			b.Preds[j] = b.Preds[i]
			// Strike out φ-edge too.
			for _, instr := range phis {
				phi := instr.(*Phi)
				phi.Edges[j] = phi.Edges[i]
			}
			j++
		}
	}
	// Nil out b.Preds[j:] and φ-edges[j:] to aid GC.
	for i := j; i < len(b.Preds); i++ {
		b.Preds[i] = nil
		for _, instr := range phis {
			instr.(*Phi).Edges[i] = nil
		}
	}
	b.Preds = b.Preds[:j]
	for _, instr := range phis {
		phi := instr.(*Phi)
		phi.Edges = phi.Edges[:j]
	}
}
+
// Destinations associated with unlabelled for/switch/select stmts.
// We push/pop one of these as we enter/leave each construct and for
// each BranchStmt we scan for the innermost target of the right type.
// The leading underscores avoid collisions with Go keywords.
//
type targets struct {
	tail         *targets // rest of stack
	_break       *BasicBlock
	_continue    *BasicBlock
	_fallthrough *BasicBlock
}
+
// Destinations associated with a labelled block.
// We populate these as labels are encountered in forward gotos or
// labelled statements.
// The leading underscores avoid collisions with Go keywords.
//
type lblock struct {
	_goto     *BasicBlock
	_break    *BasicBlock
	_continue *BasicBlock
}
+
// labelledBlock returns the branch target associated with the
// specified label, creating it if needed.
//
func (f *Function) labelledBlock(label *ast.Ident) *lblock {
	lb := f.lblocks[label.Obj]
	if lb == nil {
		lb = &lblock{_goto: f.newBasicBlock(label.Name)}
		// f.lblocks is allocated lazily, on first use.
		if f.lblocks == nil {
			f.lblocks = make(map[*ast.Object]*lblock)
		}
		f.lblocks[label.Obj] = lb
	}
	return lb
}
+
+// addParam adds a (non-escaping) parameter to f.Params of the
+// specified name, type and source position.
+//
+func (f *Function) addParam(name string, typ types.Type, pos token.Pos) *Parameter {
+ v := &Parameter{
+ name: name,
+ typ: typ,
+ pos: pos,
+ parent: f,
+ }
+ f.Params = append(f.Params, v)
+ return v
+}
+
+func (f *Function) addParamObj(obj types.Object) *Parameter {
+ name := obj.Name()
+ if name == "" {
+ name = fmt.Sprintf("arg%d", len(f.Params))
+ }
+ param := f.addParam(name, obj.Type(), obj.Pos())
+ param.object = obj
+ return param
+}
+
// addSpilledParam declares a parameter that is pre-spilled to the
// stack; the function body will load/store the spilled location.
// Subsequent lifting will eliminate spills where possible.
//
func (f *Function) addSpilledParam(obj types.Object) {
	param := f.addParamObj(obj)
	spill := &Alloc{Comment: obj.Name()}
	spill.setType(types.NewPointer(obj.Type()))
	spill.setPos(obj.Pos())
	// The spill slot, not the parameter, is the canonical home of the
	// variable: record it in f.objects and f.Locals.
	f.objects[obj] = spill
	f.Locals = append(f.Locals, spill)
	// Emit the Alloc first, then the Store of the incoming parameter.
	f.emit(spill)
	f.emit(&Store{Addr: spill, Val: param})
}
+
+// startBody initializes the function prior to generating SSA code for its body.
+// Precondition: f.Type() already set.
+//
+func (f *Function) startBody() {
+ f.currentBlock = f.newBasicBlock("entry")
+ f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init
+}
+
// createSyntacticParams populates f.Params and generates code (spills
// and named result locals) for all the parameters declared in the
// syntax. In addition it populates the f.objects mapping.
//
// Preconditions:
// f.startBody() was called.
// Postcondition:
// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0)
//
func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) {
	// Receiver (at most one inner iteration).
	if recv != nil {
		for _, field := range recv.List {
			for _, n := range field.Names {
				f.addSpilledParam(f.Pkg.info.Defs[n])
			}
			// Anonymous receiver? No need to spill.
			if field.Names == nil {
				f.addParamObj(f.Signature.Recv())
			}
		}
	}

	// Parameters.
	if functype.Params != nil {
		n := len(f.Params) // 1 if has recv, 0 otherwise
		for _, field := range functype.Params.List {
			for _, n := range field.Names {
				f.addSpilledParam(f.Pkg.info.Defs[n])
			}
			// Anonymous parameter? No need to spill.
			if field.Names == nil {
				// Offset by n so the receiver (if any) is not counted
				// when indexing the signature's parameter tuple.
				f.addParamObj(f.Signature.Params().At(len(f.Params) - n))
			}
		}
	}

	// Named results.
	if functype.Results != nil {
		for _, field := range functype.Results.List {
			// Implicit "var" decl of locals for named results.
			for _, n := range field.Names {
				f.namedResults = append(f.namedResults, f.addLocalForIdent(n))
			}
		}
	}
}
+
+// numberRegisters assigns numbers to all SSA registers
+// (value-defining Instructions) in f, to aid debugging.
+// (Non-Instruction Values are named at construction.)
+//
+func numberRegisters(f *Function) {
+ v := 0
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ switch instr.(type) {
+ case Value:
+ instr.(interface {
+ setNum(int)
+ }).setNum(v)
+ v++
+ }
+ }
+ }
+}
+
// buildReferrers populates the def/use information in all non-nil
// Value.Referrers slice.
// Precondition: all such slices are initially empty.
func buildReferrers(f *Function) {
	var rands []*Value
	for _, b := range f.Blocks {
		for _, instr := range b.Instrs {
			rands = instr.Operands(rands[:0]) // recycle storage
			for _, rand := range rands {
				if r := *rand; r != nil {
					// Values whose Referrers() is nil do not track
					// referrers and are skipped.
					if ref := r.Referrers(); ref != nil {
						*ref = append(*ref, instr)
					}
				}
			}
		}
	}
}
+
// finishBody() finalizes the function after SSA code generation of its body.
// It discards build-time state, prunes heap-escaped locals, then runs the
// post-processing passes (block optimization, referrer construction,
// dominator tree, lifting, register numbering) in their required order.
func (f *Function) finishBody() {
	f.objects = nil
	f.currentBlock = nil
	f.lblocks = nil

	// Don't pin the AST in memory (except in debug mode).
	if n := f.syntax; n != nil && !f.debugInfo() {
		f.syntax = extentNode{n.Pos(), n.End()}
	}

	// Remove from f.Locals any Allocs that escape to the heap.
	j := 0
	for _, l := range f.Locals {
		if !l.Heap {
			f.Locals[j] = l
			j++
		}
	}
	// Nil out f.Locals[j:] to aid GC.
	for i := j; i < len(f.Locals); i++ {
		f.Locals[i] = nil
	}
	f.Locals = f.Locals[:j]

	optimizeBlocks(f)

	// lift requires up-to-date def/use info and dominator tree
	// (see its preconditions), so these two passes must run first.
	buildReferrers(f)

	buildDomTree(f)

	if f.Prog.mode&NaiveForm == 0 {
		// For debugging pre-state of lifting pass:
		// numberRegisters(f)
		// f.WriteTo(os.Stderr)
		lift(f)
	}

	f.namedResults = nil // (used by lifting)

	numberRegisters(f)

	if f.Prog.mode&PrintFunctions != 0 {
		printMu.Lock()
		f.WriteTo(os.Stdout)
		printMu.Unlock()
	}

	if f.Prog.mode&SanityCheckFunctions != 0 {
		mustSanityCheck(f, nil)
	}
}
+
// RemoveNilBlocks eliminates nils from f.Blocks and renumbers the
// remaining blocks (exported wrapper around removeNilBlocks).
func (f *Function) RemoveNilBlocks() {
	f.removeNilBlocks()
}
+
+// removeNilBlocks eliminates nils from f.Blocks and updates each
+// BasicBlock.Index. Use this after any pass that may delete blocks.
+//
+func (f *Function) removeNilBlocks() {
+ j := 0
+ for _, b := range f.Blocks {
+ if b != nil {
+ b.Index = j
+ f.Blocks[j] = b
+ j++
+ }
+ }
+ // Nil out f.Blocks[j:] to aid GC.
+ for i := j; i < len(f.Blocks); i++ {
+ f.Blocks[i] = nil
+ }
+ f.Blocks = f.Blocks[:j]
+}
+
// SetDebugMode sets the debug mode for package pkg. If true, all its
// functions will include full debug info. This greatly increases the
// size of the instruction stream, and causes Functions to depend upon
// the ASTs, potentially keeping them live in memory for longer.
// (The flag is consulted via Function.debugInfo.)
func (pkg *Package) SetDebugMode(debug bool) {
	// TODO(adonovan): do we want ast.File granularity?
	pkg.debug = debug
}
+
+// debugInfo reports whether debug info is wanted for this function.
+func (f *Function) debugInfo() bool {
+ return f.Pkg != nil && f.Pkg.debug
+}
+
+// addNamedLocal creates a local variable, adds it to function f and
+// returns it. Its name and type are taken from obj. Subsequent
+// calls to f.lookup(obj) will return the same local.
+//
+func (f *Function) addNamedLocal(obj types.Object) *Alloc {
+ l := f.addLocal(obj.Type(), obj.Pos())
+ l.Comment = obj.Name()
+ f.objects[obj] = l
+ return l
+}
+
// addLocalForIdent creates a named local for the variable declared
// by identifier id, resolved via the type-checker's Defs mapping.
func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc {
	return f.addNamedLocal(f.Pkg.info.Defs[id])
}
+
+// addLocal creates an anonymous local variable of type typ, adds it
+// to function f and returns it. pos is the optional source location.
+//
+func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc {
+ v := &Alloc{}
+ v.setType(types.NewPointer(typ))
+ v.setPos(pos)
+ f.Locals = append(f.Locals, v)
+ f.emit(v)
+ return v
+}
+
// lookup returns the address of the named variable identified by obj
// that is local to function f or one of its enclosing functions.
// If escaping, the reference comes from a potentially escaping pointer
// expression and the referent must be heap-allocated.
//
func (f *Function) lookup(obj types.Object, escaping bool) Value {
	if v, ok := f.objects[obj]; ok {
		if alloc, ok := v.(*Alloc); ok && escaping {
			alloc.Heap = true
		}
		return v // function-local var (address)
	}

	// Definition must be in an enclosing function;
	// plumb it through intervening closures.
	if f.parent == nil {
		panic("no ssa.Value for " + obj.String())
	}
	// A variable referenced from a nested function is treated as
	// escaping, hence escaping=true on the recursive call.
	outer := f.parent.lookup(obj, true) // escaping
	v := &FreeVar{
		name:   obj.Name(),
		typ:    outer.Type(),
		pos:    outer.Pos(),
		outer:  outer,
		parent: f,
	}
	// Cache the FreeVar so repeated lookups reuse it.
	f.objects[obj] = v
	f.FreeVars = append(f.FreeVars, v)
	return v
}
+
// emit emits the specified instruction to function f,
// appending it to f's current basic block.
func (f *Function) emit(instr Instruction) Value {
	return f.currentBlock.emit(instr)
}
+
// RelString returns the full name of this function, qualified by
// package name, receiver type, etc.
//
// The specific formatting rules are not guaranteed and may change.
//
// Examples:
//      "math.IsNaN"                  // a package-level function
//      "(*bytes.Buffer).Bytes"       // a declared method or a wrapper
//      "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0)
//      "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure)
//      "main.main$1"                 // an anonymous function in main
//      "main.init#1"                 // a declared init function
//      "main.init"                   // the synthesized package initializer
//
// When these functions are referred to from within the same package
// (i.e. from == f.Pkg.Object), they are rendered without the package path.
// For example: "IsNaN", "(*Buffer).Bytes", etc.
//
// All non-synthetic functions have distinct package-qualified names.
// (But two methods may have the same name "(T).f" if one is a synthetic
// wrapper promoting a non-exported method "f" from another package; in
// that case, the strings are equal but the identifiers "f" are distinct.)
//
func (f *Function) RelString(from *types.Package) string {
	// Anonymous?
	if f.parent != nil {
		// An anonymous function's Name() looks like "parentName$1",
		// but its String() should include the type/package/etc.
		parent := f.parent.RelString(from)
		for i, anon := range f.parent.AnonFuncs {
			if anon == f {
				return fmt.Sprintf("%s$%d", parent, 1+i)
			}
		}

		return f.name // should never happen
	}

	// Method (declared or wrapper)?
	if recv := f.Signature.Recv(); recv != nil {
		return f.relMethod(from, recv.Type())
	}

	// Thunk?
	if f.method != nil {
		return f.relMethod(from, f.method.Recv())
	}

	// Bound?
	// (The sole free variable holds the receiver value.)
	if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") {
		return f.relMethod(from, f.FreeVars[0].Type())
	}

	// Package-level function?
	// Prefix with package name for cross-package references only.
	if p := f.pkg(); p != nil && p != from {
		return fmt.Sprintf("%s.%s", p.Path(), f.name)
	}

	// Unknown.
	return f.name
}
+
+func (f *Function) relMethod(from *types.Package, recv types.Type) string {
+ return fmt.Sprintf("(%s).%s", relType(recv, from), f.name)
+}
+
// writeSignature writes to buf the signature sig in declaration syntax.
// When sig has a receiver, params[0] supplies its name and type.
func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature, params []*Parameter) {
	buf.WriteString("func ")
	if recv := sig.Recv(); recv != nil {
		buf.WriteString("(")
		if n := params[0].Name(); n != "" {
			buf.WriteString(n)
			buf.WriteString(" ")
		}
		types.WriteType(buf, params[0].Type(), types.RelativeTo(from))
		buf.WriteString(") ")
	}
	buf.WriteString(name)
	types.WriteSignature(buf, sig, types.RelativeTo(from))
}
+
+func (f *Function) pkg() *types.Package {
+ if f.Pkg != nil {
+ return f.Pkg.Pkg
+ }
+ return nil
+}
+
var _ io.WriterTo = (*Function)(nil) // *Function implements io.WriterTo
+
+func (f *Function) WriteTo(w io.Writer) (int64, error) {
+ var buf bytes.Buffer
+ WriteFunction(&buf, f)
+ n, err := w.Write(buf.Bytes())
+ return int64(n), err
+}
+
// WriteFunction writes to buf a human-readable "disassembly" of f.
func WriteFunction(buf *bytes.Buffer, f *Function) {
	// Header metadata.
	fmt.Fprintf(buf, "# Name: %s\n", f.String())
	if f.Pkg != nil {
		fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path())
	}
	if syn := f.Synthetic; syn != "" {
		fmt.Fprintln(buf, "# Synthetic:", syn)
	}
	if pos := f.Pos(); pos.IsValid() {
		fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos))
	}

	if f.parent != nil {
		fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name())
	}

	if f.Recover != nil {
		fmt.Fprintf(buf, "# Recover: %s\n", f.Recover)
	}

	from := f.pkg()

	if f.FreeVars != nil {
		buf.WriteString("# Free variables:\n")
		for i, fv := range f.FreeVars {
			fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from))
		}
	}

	if len(f.Locals) > 0 {
		buf.WriteString("# Locals:\n")
		for i, l := range f.Locals {
			fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from))
		}
	}
	writeSignature(buf, from, f.Name(), f.Signature, f.Params)
	buf.WriteString(":\n")

	if f.Blocks == nil {
		buf.WriteString("\t(external)\n")
	}

	// NB. column calculations are confused by non-ASCII
	// characters and assume 8-space tabs.
	const punchcard = 80 // for old time's sake.
	const tabwidth = 8
	for _, b := range f.Blocks {
		if b == nil {
			// Corrupt CFG.
			fmt.Fprintf(buf, ".nil:\n")
			continue
		}
		n, _ := fmt.Fprintf(buf, "%d:", b.Index)
		bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs))
		// Right-align the block summary against the punchcard width.
		fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg)

		if false { // CFG debugging
			fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs)
		}
		for _, instr := range b.Instrs {
			buf.WriteString("\t")
			switch v := instr.(type) {
			case Value:
				l := punchcard - tabwidth
				// Left-align the instruction.
				if name := v.Name(); name != "" {
					n, _ := fmt.Fprintf(buf, "%s = ", name)
					l -= n
				}
				n, _ := buf.WriteString(instr.String())
				l -= n
				// Right-align the type if there's space.
				if t := v.Type(); t != nil {
					buf.WriteByte(' ')
					ts := relType(t, from)
					l -= len(ts) + len(" ") // (spaces before and after type)
					if l > 0 {
						fmt.Fprintf(buf, "%*s", l, "")
					}
					buf.WriteString(ts)
				}
			case nil:
				// Be robust against bad transforms.
				buf.WriteString("<deleted>")
			default:
				buf.WriteString(instr.String())
			}
			buf.WriteString("\n")
		}
	}
	fmt.Fprintf(buf, "\n")
}
+
// newBasicBlock adds to f a new basic block and returns it. It does
// not automatically become the current block for subsequent calls to emit.
// comment is an optional string for more readable debugging output.
//
func (f *Function) newBasicBlock(comment string) *BasicBlock {
	b := &BasicBlock{
		Index:   len(f.Blocks),
		Comment: comment,
		parent:  f,
	}
	// Use the block's inline succs2 array as initial storage for
	// Succs — presumably to avoid a heap allocation in the common
	// case of at most two successors (TODO confirm field intent).
	b.Succs = b.succs2[:0]
	f.Blocks = append(f.Blocks, b)
	return b
}
+
// NewFunction returns a new synthetic Function instance belonging to
// prog, with its name and signature fields set as specified. The
// provenance string is stored in the function's Synthetic field.
//
// The caller is responsible for initializing the remaining fields of
// the function object, e.g. Pkg, Params, Blocks.
//
// It is practically impossible for clients to construct well-formed
// SSA functions/packages/programs directly, so we assume this is the
// job of the Builder alone. NewFunction exists to provide clients a
// little flexibility. For example, analysis tools may wish to
// construct fake Functions for the root of the callgraph, a fake
// "reflect" package, etc.
//
// TODO(adonovan): think harder about the API here.
//
func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function {
	return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance}
}
+
// extentNode is a minimal ast.Node that records only the lexical
// extent (start and end positions) of a function, letting the full
// AST be released from memory.
type extentNode [2]token.Pos

// Pos returns the start of the extent.
func (n extentNode) Pos() token.Pos {
	return n[0]
}

// End returns the end of the extent.
func (n extentNode) End() token.Pos {
	return n[1]
}
+
// Syntax returns an ast.Node whose Pos/End methods provide the
// lexical extent of the function if it was defined by Go source code
// (f.Synthetic==""), or nil otherwise.
//
// If f was built with debug information (see Package.SetDebugMode),
// the result is the *ast.FuncDecl or *ast.FuncLit that declared the
// function. Otherwise, it is an opaque Node providing only position
// information; this avoids pinning the AST in memory.
//
func (f *Function) Syntax() ast.Node { return f.syntax }
diff --git a/vendor/honnef.co/go/tools/ssa/identical.go b/vendor/honnef.co/go/tools/ssa/identical.go
new file mode 100644
index 0000000..53cbee1
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/identical.go
@@ -0,0 +1,7 @@
+// +build go1.8
+
+package ssa
+
+import "go/types"
+
// structTypesIdentical reports whether two types are identical;
// on go1.8+ struct tags are ignored in the comparison.
var structTypesIdentical = types.IdenticalIgnoreTags
diff --git a/vendor/honnef.co/go/tools/ssa/identical_17.go b/vendor/honnef.co/go/tools/ssa/identical_17.go
new file mode 100644
index 0000000..da89d33
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/identical_17.go
@@ -0,0 +1,7 @@
+// +build !go1.8
+
+package ssa
+
+import "go/types"
+
// structTypesIdentical reports whether two types are identical;
// before go1.8 there is no tag-ignoring variant, so plain
// types.Identical is used.
var structTypesIdentical = types.Identical
diff --git a/vendor/honnef.co/go/tools/ssa/lift.go b/vendor/honnef.co/go/tools/ssa/lift.go
new file mode 100644
index 0000000..0270797
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/lift.go
@@ -0,0 +1,608 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// This file defines the lifting pass which tries to "lift" Alloc
+// cells (new/local variables) into SSA registers, replacing loads
+// with the dominating stored value, eliminating loads and stores, and
+// inserting φ-nodes as needed.
+
+// Cited papers and resources:
+//
+// Ron Cytron et al. 1991. Efficiently computing SSA form...
+// http://doi.acm.org/10.1145/115372.115320
+//
+// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm.
+// Software Practice and Experience 2001, 4:1-10.
+// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf
+//
+// Daniel Berlin, llvmdev mailing list, 2012.
+// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html
+// (Be sure to expand the whole thread.)
+
+// TODO(adonovan): opt: there are many optimizations worth evaluating, and
+// the conventional wisdom for SSA construction is that a simple
+// algorithm well engineered often beats those of better asymptotic
+// complexity on all but the most egregious inputs.
+//
+// Danny Berlin suggests that the Cooper et al. algorithm for
+// computing the dominance frontier is superior to Cytron et al.
+// Furthermore he recommends that rather than computing the DF for the
+// whole function then renaming all alloc cells, it may be cheaper to
+// compute the DF for each alloc cell separately and throw it away.
+//
+// Consider exploiting liveness information to avoid creating dead
+// φ-nodes which we then immediately remove.
+//
+// Integrate lifting with scalar replacement of aggregates (SRA) since
+// the two are synergistic.
+//
+// Also see many other "TODO: opt" suggestions in the code.
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "math/big"
+ "os"
+)
+
// debugLifting, if true, enables sanity checking and diagnostic
// output at each step of lifting. Very verbose; development only.
const debugLifting = false
+
// domFrontier maps each block to the set of blocks in its dominance
// frontier. The outer slice is conceptually a map keyed by
// Block.Index. The inner slice is conceptually a set, possibly
// containing duplicates.
//
// TODO(adonovan): opt: measure impact of dups; consider a packed bit
// representation, e.g. big.Int, and bitwise parallel operations for
// the union step in the Children loop.
//
// domFrontier's methods mutate the slice's elements but not its
// length, so their receivers needn't be pointers.
//
type domFrontier [][]*BasicBlock
+
+func (df domFrontier) add(u, v *BasicBlock) {
+ p := &df[u.Index]
+ *p = append(*p, v)
+}
+
// build builds the dominance frontier df for the dominator (sub)tree
// rooted at u, using the Cytron et al. algorithm.
//
// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA
// by pruning the entire IDF computation, rather than merely pruning
// the DF -> IDF step.
func (df domFrontier) build(u *BasicBlock) {
	// Encounter each node u in postorder of dom tree.
	for _, child := range u.dom.children {
		df.build(child)
	}
	// DF-local: successors of u that u does not immediately dominate.
	for _, vb := range u.Succs {
		if v := vb.dom; v.idom != u {
			df.add(u, vb)
		}
	}
	// DF-up: inherit frontier entries from dominated children.
	for _, w := range u.dom.children {
		for _, vb := range df[w.Index] {
			// TODO(adonovan): opt: use word-parallel bitwise union.
			if v := vb.dom; v.idom != u {
				df.add(u, vb)
			}
		}
	}
}
+
+func buildDomFrontier(fn *Function) domFrontier {
+ df := make(domFrontier, len(fn.Blocks))
+ df.build(fn.Blocks[0])
+ if fn.Recover != nil {
+ df.build(fn.Recover)
+ }
+ return df
+}
+
// RemoveInstr removes all occurrences of instr from refs and returns
// the shortened slice (exported wrapper around removeInstr).
func RemoveInstr(refs []Instruction, instr Instruction) []Instruction {
	return removeInstr(refs, instr)
}
+
+func removeInstr(refs []Instruction, instr Instruction) []Instruction {
+ i := 0
+ for _, ref := range refs {
+ if ref == instr {
+ continue
+ }
+ refs[i] = ref
+ i++
+ }
+ for j := i; j != len(refs); j++ {
+ refs[j] = nil // aid GC
+ }
+ return refs[:i]
+}
+
// lift attempts to replace local and new Allocs accessed only with
// load/store by SSA registers, inserting φ-nodes where necessary.
// The result is a program in classical pruned SSA form.
//
// Preconditions:
// - fn has no dead blocks (blockopt has run).
// - Def/use info (Operands and Referrers) is up-to-date.
// - The dominator tree is up-to-date.
//
func lift(fn *Function) {
	// TODO(adonovan): opt: lots of little optimizations may be
	// worthwhile here, especially if they cause us to avoid
	// buildDomFrontier. For example:
	//
	// - Alloc never loaded? Eliminate.
	// - Alloc never stored? Replace all loads with a zero constant.
	// - Alloc stored once? Replace loads with dominating store;
	//   don't forget that an Alloc is itself an effective store
	//   of zero.
	// - Alloc used only within a single block?
	//   Use degenerate algorithm avoiding φ-nodes.
	// - Consider synergy with scalar replacement of aggregates (SRA).
	//   e.g. *(&x.f) where x is an Alloc.
	//   Perhaps we'd get better results if we generated this as x.f
	//   i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)).
	//   Unclear.
	//
	// But we will start with the simplest correct code.
	df := buildDomFrontier(fn)

	if debugLifting {
		title := false
		for i, blocks := range df {
			if blocks != nil {
				if !title {
					fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn)
					title = true
				}
				fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks)
			}
		}
	}

	newPhis := make(newPhiMap)

	// During this pass we will replace some BasicBlock.Instrs
	// (allocs, loads and stores) with nil, keeping a count in
	// BasicBlock.gaps. At the end we will reset Instrs to the
	// concatenation of all non-dead newPhis and non-nil Instrs
	// for the block, reusing the original array if space permits.

	// While we're here, we also eliminate 'rundefers'
	// instructions in functions that contain no 'defer'
	// instructions.
	usesDefer := false

	// Determine which allocs we can lift and number them densely.
	// The renaming phase uses this numbering for compact maps.
	numAllocs := 0
	for _, b := range fn.Blocks {
		b.gaps = 0
		b.rundefers = 0
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case *Alloc:
				// index stays -1 for allocs that cannot be lifted.
				index := -1
				if liftAlloc(df, instr, newPhis) {
					index = numAllocs
					numAllocs++
				}
				instr.index = index
			case *Defer:
				usesDefer = true
			case *RunDefers:
				b.rundefers++
			}
		}
	}

	// renaming maps an alloc (keyed by index) to its replacement
	// value. Initially the renaming contains nil, signifying the
	// zero constant of the appropriate type; we construct the
	// Const lazily at most once on each path through the domtree.
	// TODO(adonovan): opt: cache per-function not per subtree.
	renaming := make([]Value, numAllocs)

	// Renaming.
	rename(fn.Blocks[0], renaming, newPhis)

	// Eliminate dead new phis, then prepend the live ones to each block.
	for _, b := range fn.Blocks {

		// Compress the newPhis slice to eliminate unused phis.
		// TODO(adonovan): opt: compute liveness to avoid
		// placing phis in blocks for which the alloc cell is
		// not live.
		nps := newPhis[b]
		j := 0
		for _, np := range nps {
			if !phiIsLive(np.phi) {
				// discard it, first removing it from referrers
				for _, newval := range np.phi.Edges {
					if refs := newval.Referrers(); refs != nil {
						*refs = removeInstr(*refs, np.phi)
					}
				}
				continue
			}
			nps[j] = np
			j++
		}
		nps = nps[:j]

		rundefersToKill := b.rundefers
		if usesDefer {
			rundefersToKill = 0
		}

		if j+b.gaps+rundefersToKill == 0 {
			continue // fast path: no new phis or gaps
		}

		// Compact nps + non-nil Instrs into a new slice.
		// TODO(adonovan): opt: compact in situ if there is
		// sufficient space or slack in the slice.
		dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill)
		for i, np := range nps {
			dst[i] = np.phi
		}
		// j now serves as the write cursor for surviving instructions,
		// which follow the φ-node prefix.
		for _, instr := range b.Instrs {
			if instr == nil {
				continue
			}
			if !usesDefer {
				if _, ok := instr.(*RunDefers); ok {
					continue
				}
			}
			dst[j] = instr
			j++
		}
		// NOTE(review): this loop repeats the φ-prepend loop above with
		// identical, idempotent writes; it looks redundant — confirm
		// against upstream before removing.
		for i, np := range nps {
			dst[i] = np.phi
		}
		b.Instrs = dst
	}

	// Remove any fn.Locals that were lifted.
	j := 0
	for _, l := range fn.Locals {
		if l.index < 0 {
			fn.Locals[j] = l
			j++
		}
	}
	// Nil out fn.Locals[j:] to aid GC.
	for i := j; i < len(fn.Locals); i++ {
		fn.Locals[i] = nil
	}
	fn.Locals = fn.Locals[:j]
}
+
+func phiIsLive(phi *Phi) bool {
+ for _, instr := range *phi.Referrers() {
+ if instr == phi {
+ continue // self-refs don't count
+ }
+ if _, ok := instr.(*DebugRef); ok {
+ continue // debug refs don't count
+ }
+ return true
+ }
+ return false
+}
+
+type blockSet struct{ big.Int } // (inherit methods from Int)
+
+// add adds b to the set and returns true if the set changed.
+func (s *blockSet) add(b *BasicBlock) bool {
+ i := b.Index
+ if s.Bit(i) != 0 {
+ return false
+ }
+ s.SetBit(&s.Int, i, 1)
+ return true
+}
+
+// take removes an arbitrary element from a set s and
+// returns its index, or returns -1 if empty.
+func (s *blockSet) take() int {
+ l := s.BitLen()
+ for i := 0; i < l; i++ {
+ if s.Bit(i) == 1 {
+ s.SetBit(&s.Int, i, 0)
+ return i
+ }
+ }
+ return -1
+}
+
// newPhi is a pair of a newly introduced φ-node and the lifted Alloc
// it replaces. Pairs are collected per-block in a newPhiMap during
// lifting.
type newPhi struct {
	phi   *Phi
	alloc *Alloc
}
+
// newPhiMap records, for each basic block, the set of newPhis that
// must be prepended to the block (done at the end of lift).
type newPhiMap map[*BasicBlock][]newPhi
+
// liftAlloc determines whether alloc can be lifted into registers,
// and if so, it populates newPhis with all the φ-nodes it may require
// and returns true.
//
func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap) bool {
	// Don't lift aggregates into registers, because we don't have
	// a way to express their zero-constants.
	switch deref(alloc.Type()).Underlying().(type) {
	case *types.Array, *types.Struct:
		return false
	}

	// Don't lift named return values in functions that defer
	// calls that may recover from panic.
	if fn := alloc.Parent(); fn.Recover != nil {
		for _, nr := range fn.namedResults {
			if nr == alloc {
				return false
			}
		}
	}

	// Compute defblocks, the set of blocks containing a
	// definition of the alloc cell.
	var defblocks blockSet
	for _, instr := range *alloc.Referrers() {
		// Bail out if we discover the alloc is not liftable;
		// the only operations permitted to use the alloc are
		// loads/stores into the cell, and DebugRef.
		switch instr := instr.(type) {
		case *Store:
			if instr.Val == alloc {
				return false // address used as value
			}
			if instr.Addr != alloc {
				panic("Alloc.Referrers is inconsistent")
			}
			defblocks.add(instr.Block())
		case *UnOp:
			// token.MUL is pointer dereference, i.e. a load.
			if instr.Op != token.MUL {
				return false // not a load
			}
			if instr.X != alloc {
				panic("Alloc.Referrers is inconsistent")
			}
		case *DebugRef:
			// ok
		default:
			return false // some other instruction
		}
	}
	// The Alloc itself counts as a (zero) definition of the cell.
	defblocks.add(alloc.Block())

	if debugLifting {
		fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name())
	}

	fn := alloc.Parent()

	// Φ-insertion.
	//
	// What follows is the body of the main loop of the insert-φ
	// function described by Cytron et al, but instead of using
	// counter tricks, we just reset the 'hasAlready' and 'work'
	// sets each iteration. These are bitmaps so it's pretty cheap.
	//
	// TODO(adonovan): opt: recycle slice storage for W,
	// hasAlready, defBlocks across liftAlloc calls.
	var hasAlready blockSet

	// Initialize W and work to defblocks.
	var work blockSet = defblocks // blocks seen
	var W blockSet                // blocks to do
	W.Set(&defblocks.Int)

	// Traverse iterated dominance frontier, inserting φ-nodes.
	for i := W.take(); i != -1; i = W.take() {
		u := fn.Blocks[i]
		for _, v := range df[u.Index] {
			if hasAlready.add(v) {
				// Create φ-node.
				// It will be prepended to v.Instrs later, if needed.
				// It carries the alloc's position and element type.
				phi := &Phi{
					Edges:   make([]Value, len(v.Preds)),
					Comment: alloc.Comment,
				}
				phi.pos = alloc.Pos()
				phi.setType(deref(alloc.Type()))
				phi.block = v
				if debugLifting {
					fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v)
				}
				newPhis[v] = append(newPhis[v], newPhi{phi, alloc})

				if work.add(v) {
					W.add(v)
				}
			}
		}
	}

	return true
}
+
+func ReplaceAll(x, y Value) {
+ replaceAll(x, y)
+}
+
+// replaceAll replaces all intraprocedural uses of x with y,
+// updating x.Referrers and y.Referrers.
+// Precondition: x.Referrers() != nil, i.e. x must be local to some function.
+//
+func replaceAll(x, y Value) {
+ var rands []*Value
+ pxrefs := x.Referrers()
+ pyrefs := y.Referrers()
+ for _, instr := range *pxrefs {
+ rands = instr.Operands(rands[:0]) // recycle storage
+ for _, rand := range rands {
+ if *rand != nil {
+ if *rand == x {
+ *rand = y
+ }
+ }
+ }
+ if pyrefs != nil {
+ *pyrefs = append(*pyrefs, instr) // dups ok
+ }
+ }
+ *pxrefs = nil // x is now unreferenced
+}
+
+// renamed returns the value to which alloc is being renamed,
+// constructing it lazily if it's the implicit zero initialization.
+//
+func renamed(renaming []Value, alloc *Alloc) Value {
+ v := renaming[alloc.index]
+ if v == nil {
+ v = zeroConst(deref(alloc.Type()))
+ renaming[alloc.index] = v
+ }
+ return v
+}
+
+// rename implements the (Cytron et al) SSA renaming algorithm, a
+// preorder traversal of the dominator tree replacing all loads of
+// Alloc cells with the value stored to that cell by the dominating
+// store instruction. For lifting, we need only consider loads,
+// stores and φ-nodes.
+//
+// renaming is a map from *Alloc (keyed by index number) to its
+// dominating stored value; newPhis[x] is the set of new φ-nodes to be
+// prepended to block x.
+//
+func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) {
+ // Each φ-node becomes the new name for its associated Alloc.
+ for _, np := range newPhis[u] {
+ phi := np.phi
+ alloc := np.alloc
+ renaming[alloc.index] = phi
+ }
+
+ // Rename loads and stores of allocs.
+ for i, instr := range u.Instrs {
+ switch instr := instr.(type) {
+ case *Alloc:
+ if instr.index >= 0 { // store of zero to Alloc cell
+ // Replace dominated loads by the zero value.
+ renaming[instr.index] = nil
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr)
+ }
+ // Delete the Alloc.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+
+ case *Store:
+ if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell
+ // Replace dominated loads by the stored value.
+ renaming[alloc.index] = instr.Val
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n",
+ instr, instr.Val.Name())
+ }
+ // Remove the store from the referrer list of the stored value.
+ if refs := instr.Val.Referrers(); refs != nil {
+ *refs = removeInstr(*refs, instr)
+ }
+ // Delete the Store.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+
+ case *UnOp:
+ if instr.Op == token.MUL {
+ if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell
+ newval := renamed(renaming, alloc)
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n",
+ instr.Name(), instr, newval.Name())
+ }
+ // Replace all references to
+ // the loaded value by the
+ // dominating stored value.
+ replaceAll(instr, newval)
+ // Delete the Load.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+ }
+
+ case *DebugRef:
+ if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell
+ if instr.IsAddr {
+ instr.X = renamed(renaming, alloc)
+ instr.IsAddr = false
+
+ // Add DebugRef to instr.X's referrers.
+ if refs := instr.X.Referrers(); refs != nil {
+ *refs = append(*refs, instr)
+ }
+ } else {
+ // A source expression denotes the address
+ // of an Alloc that was optimized away.
+ instr.X = nil
+
+ // Delete the DebugRef.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+ }
+ }
+ }
+
+ // For each φ-node in a CFG successor, rename the edge.
+ for _, v := range u.Succs {
+ phis := newPhis[v]
+ if len(phis) == 0 {
+ continue
+ }
+ i := v.predIndex(u)
+ for _, np := range phis {
+ phi := np.phi
+ alloc := np.alloc
+ newval := renamed(renaming, alloc)
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n",
+ phi.Name(), u, v, i, alloc.Name(), newval.Name())
+ }
+ phi.Edges[i] = newval
+ if prefs := newval.Referrers(); prefs != nil {
+ *prefs = append(*prefs, phi)
+ }
+ }
+ }
+
+ // Continue depth-first recursion over domtree, pushing a
+ // fresh copy of the renaming map for each subtree.
+ for _, v := range u.dom.children {
+ // TODO(adonovan): opt: avoid copy on final iteration; use destructive update.
+ r := make([]Value, len(renaming))
+ copy(r, renaming)
+ rename(v, r, newPhis)
+ }
+}
diff --git a/vendor/honnef.co/go/tools/ssa/lvalue.go b/vendor/honnef.co/go/tools/ssa/lvalue.go
new file mode 100644
index 0000000..d2226a9
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/lvalue.go
@@ -0,0 +1,125 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// lvalues are the union of addressable expressions and map-index
+// expressions.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+)
+
+// An lvalue represents an assignable location that may appear on the
+// left-hand side of an assignment. This is a generalization of a
+// pointer to permit updates to elements of maps.
+//
+type lvalue interface {
+ store(fn *Function, v Value) // stores v into the location
+ load(fn *Function) Value // loads the contents of the location
+ address(fn *Function) Value // address of the location
+ typ() types.Type // returns the type of the location
+}
+
+// An address is an lvalue represented by a true pointer.
+type address struct {
+ addr Value
+ pos token.Pos // source position
+ expr ast.Expr // source syntax of the value (not address) [debug mode]
+}
+
+func (a *address) load(fn *Function) Value {
+ load := emitLoad(fn, a.addr)
+ load.pos = a.pos
+ return load
+}
+
+func (a *address) store(fn *Function, v Value) {
+ store := emitStore(fn, a.addr, v, a.pos)
+ if a.expr != nil {
+ // store.Val is v, converted for assignability.
+ emitDebugRef(fn, a.expr, store.Val, false)
+ }
+}
+
+func (a *address) address(fn *Function) Value {
+ if a.expr != nil {
+ emitDebugRef(fn, a.expr, a.addr, true)
+ }
+ return a.addr
+}
+
+func (a *address) typ() types.Type {
+ return deref(a.addr.Type())
+}
+
+// An element is an lvalue represented by m[k], the location of an
+// element of a map or string. These locations are not addressable
+// since pointers cannot be formed from them, but they do support
+// load(), and in the case of maps, store().
+//
+type element struct {
+ m, k Value // map or string
+ t types.Type // map element type or string byte type
+ pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v)
+}
+
+func (e *element) load(fn *Function) Value {
+ l := &Lookup{
+ X: e.m,
+ Index: e.k,
+ }
+ l.setPos(e.pos)
+ l.setType(e.t)
+ return fn.emit(l)
+}
+
+func (e *element) store(fn *Function, v Value) {
+ up := &MapUpdate{
+ Map: e.m,
+ Key: e.k,
+ Value: emitConv(fn, v, e.t),
+ }
+ up.pos = e.pos
+ fn.emit(up)
+}
+
+func (e *element) address(fn *Function) Value {
+ panic("map/string elements are not addressable")
+}
+
+func (e *element) typ() types.Type {
+ return e.t
+}
+
+// A blank is a dummy variable whose name is "_".
+// It is not reified: loads are illegal and stores are ignored.
+//
+type blank struct{}
+
+func (bl blank) load(fn *Function) Value {
+ panic("blank.load is illegal")
+}
+
+func (bl blank) store(fn *Function, v Value) {
+ s := &BlankStore{
+ Val: v,
+ }
+ fn.emit(s)
+}
+
+func (bl blank) address(fn *Function) Value {
+ panic("blank var is not addressable")
+}
+
+func (bl blank) typ() types.Type {
+ // This should be the type of the blank Ident; the typechecker
+ // doesn't provide this yet, but fortunately, we don't need it
+ // yet either.
+ panic("blank.typ is unimplemented")
+}
diff --git a/vendor/honnef.co/go/tools/ssa/methods.go b/vendor/honnef.co/go/tools/ssa/methods.go
new file mode 100644
index 0000000..7d1fb42
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/methods.go
@@ -0,0 +1,241 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// This file defines utilities for population of method sets.
+
+import (
+ "fmt"
+ "go/types"
+)
+
+// MethodValue returns the Function implementing method sel, building
+// wrapper methods on demand. It returns nil if sel denotes an
+// abstract (interface) method.
+//
+// Precondition: sel.Kind() == MethodVal.
+//
+// Thread-safe.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
+//
+func (prog *Program) MethodValue(sel *types.Selection) *Function {
+ if sel.Kind() != types.MethodVal {
+ panic(fmt.Sprintf("Method(%s) kind != MethodVal", sel))
+ }
+ T := sel.Recv()
+ if isInterface(T) {
+ return nil // abstract method
+ }
+ if prog.mode&LogSource != 0 {
+ defer logStack("Method %s %v", T, sel)()
+ }
+
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+
+ return prog.addMethod(prog.createMethodSet(T), sel)
+}
+
+// LookupMethod returns the implementation of the method of type T
+// identified by (pkg, name). It returns nil if the method exists but
+// is abstract, and panics if T has no such method.
+//
+func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function {
+ sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name)
+ if sel == nil {
+ panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name)))
+ }
+ return prog.MethodValue(sel)
+}
+
+// methodSet contains the (concrete) methods of a non-interface type.
+type methodSet struct {
+ mapping map[string]*Function // populated lazily
+ complete bool // mapping contains all methods
+}
+
+// Precondition: !isInterface(T).
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
+func (prog *Program) createMethodSet(T types.Type) *methodSet {
+ mset, ok := prog.methodSets.At(T).(*methodSet)
+ if !ok {
+ mset = &methodSet{mapping: make(map[string]*Function)}
+ prog.methodSets.Set(T, mset)
+ }
+ return mset
+}
+
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
+func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function {
+ if sel.Kind() == types.MethodExpr {
+ panic(sel)
+ }
+ id := sel.Obj().Id()
+ fn := mset.mapping[id]
+ if fn == nil {
+ obj := sel.Obj().(*types.Func)
+
+ needsPromotion := len(sel.Index()) > 1
+ needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.Recv())
+ if needsPromotion || needsIndirection {
+ fn = makeWrapper(prog, sel)
+ } else {
+ fn = prog.declaredFunc(obj)
+ }
+ if fn.Signature.Recv() == nil {
+ panic(fn) // missing receiver
+ }
+ mset.mapping[id] = fn
+ }
+ return fn
+}
+
+// RuntimeTypes returns a new unordered slice containing all
+// concrete types in the program for which a complete (non-empty)
+// method set is required at run-time.
+//
+// Thread-safe.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
+//
+func (prog *Program) RuntimeTypes() []types.Type {
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+
+ var res []types.Type
+ prog.methodSets.Iterate(func(T types.Type, v interface{}) {
+ if v.(*methodSet).complete {
+ res = append(res, T)
+ }
+ })
+ return res
+}
+
+// declaredFunc returns the concrete function/method denoted by obj.
+// Panic ensues if there is none.
+//
+func (prog *Program) declaredFunc(obj *types.Func) *Function {
+ if v := prog.packageLevelValue(obj); v != nil {
+ return v.(*Function)
+ }
+ panic("no concrete method: " + obj.String())
+}
+
+// needMethodsOf ensures that runtime type information (including the
+// complete method set) is available for the specified type T and all
+// its subcomponents.
+//
+// needMethodsOf must be called for at least every type that is an
+// operand of some MakeInterface instruction, and for the type of
+// every exported package member.
+//
+// Precondition: T is not a method signature (*Signature with Recv()!=nil).
+//
+// Thread-safe. (Called via emitConv from multiple builder goroutines.)
+//
+// TODO(adonovan): make this faster. It accounts for 20% of SSA build time.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
+//
+func (prog *Program) needMethodsOf(T types.Type) {
+ prog.methodsMu.Lock()
+ prog.needMethods(T, false)
+ prog.methodsMu.Unlock()
+}
+
+// Precondition: T is not a method signature (*Signature with Recv()!=nil).
+// Recursive case: skip => don't create methods for T.
+//
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
+//
+func (prog *Program) needMethods(T types.Type, skip bool) {
+ // Each package maintains its own set of types it has visited.
+ if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok {
+ // needMethods(T) was previously called
+ if !prevSkip || skip {
+ return // already seen, with same or false 'skip' value
+ }
+ }
+ prog.runtimeTypes.Set(T, skip)
+
+ tmset := prog.MethodSets.MethodSet(T)
+
+ if !skip && !isInterface(T) && tmset.Len() > 0 {
+ // Create methods of T.
+ mset := prog.createMethodSet(T)
+ if !mset.complete {
+ mset.complete = true
+ n := tmset.Len()
+ for i := 0; i < n; i++ {
+ prog.addMethod(mset, tmset.At(i))
+ }
+ }
+ }
+
+ // Recursion over signatures of each method.
+ for i := 0; i < tmset.Len(); i++ {
+ sig := tmset.At(i).Type().(*types.Signature)
+ prog.needMethods(sig.Params(), false)
+ prog.needMethods(sig.Results(), false)
+ }
+
+ switch t := T.(type) {
+ case *types.Basic:
+ // nop
+
+ case *types.Interface:
+ // nop---handled by recursion over method set.
+
+ case *types.Pointer:
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Slice:
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Chan:
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Map:
+ prog.needMethods(t.Key(), false)
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Signature:
+ if t.Recv() != nil {
+ panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv()))
+ }
+ prog.needMethods(t.Params(), false)
+ prog.needMethods(t.Results(), false)
+
+ case *types.Named:
+ // A pointer-to-named type can be derived from a named
+ // type via reflection. It may have methods too.
+ prog.needMethods(types.NewPointer(T), false)
+
+ // Consider 'type T struct{S}' where S has methods.
+ // Reflection provides no way to get from T to struct{S},
+ // only to S, so the method set of struct{S} is unwanted,
+ // so set 'skip' flag during recursion.
+ prog.needMethods(t.Underlying(), true)
+
+ case *types.Array:
+ prog.needMethods(t.Elem(), false)
+
+ case *types.Struct:
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ prog.needMethods(t.Field(i).Type(), false)
+ }
+
+ case *types.Tuple:
+ for i, n := 0, t.Len(); i < n; i++ {
+ prog.needMethods(t.At(i).Type(), false)
+ }
+
+ default:
+ panic(T)
+ }
+}
diff --git a/vendor/honnef.co/go/tools/ssa/mode.go b/vendor/honnef.co/go/tools/ssa/mode.go
new file mode 100644
index 0000000..d2a2698
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/mode.go
@@ -0,0 +1,100 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines the BuilderMode type and its command-line flag.
+
+import (
+ "bytes"
+ "fmt"
+)
+
+// BuilderMode is a bitmask of options for diagnostics and checking.
+//
+// *BuilderMode satisfies the flag.Value interface. Example:
+//
+// var mode = ssa.BuilderMode(0)
+// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) }
+//
+type BuilderMode uint
+
+const (
+ PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout
+ PrintFunctions // Print function SSA code to stdout
+ LogSource // Log source locations as SSA builder progresses
+ SanityCheckFunctions // Perform sanity checking of function bodies
+ NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers
+ BuildSerially // Build packages serially, not in parallel.
+ GlobalDebug // Enable debug info for all packages
+ BareInits // Build init functions without guards or calls to dependent inits
+)
+
+const BuilderModeDoc = `Options controlling the SSA builder.
+The value is a sequence of zero or more of these letters:
+C perform sanity [C]hecking of the SSA form.
+D include [D]ebug info for every function.
+P print [P]ackage inventory.
+F print [F]unction SSA code.
+S log [S]ource locations as SSA builder progresses.
+L build distinct packages seria[L]ly instead of in parallel.
+N build [N]aive SSA form: don't replace local loads/stores with registers.
+I build bare [I]nit functions: no init guards or calls to dependent inits.
+`
+
+func (m BuilderMode) String() string {
+ var buf bytes.Buffer
+ if m&GlobalDebug != 0 {
+ buf.WriteByte('D')
+ }
+ if m&PrintPackages != 0 {
+ buf.WriteByte('P')
+ }
+ if m&PrintFunctions != 0 {
+ buf.WriteByte('F')
+ }
+ if m&LogSource != 0 {
+ buf.WriteByte('S')
+ }
+ if m&SanityCheckFunctions != 0 {
+ buf.WriteByte('C')
+ }
+ if m&NaiveForm != 0 {
+ buf.WriteByte('N')
+ }
+ if m&BuildSerially != 0 {
+ buf.WriteByte('L')
+ }
+ return buf.String()
+}
+
+// Set parses the flag characters in s and updates *m.
+func (m *BuilderMode) Set(s string) error {
+ var mode BuilderMode
+ for _, c := range s {
+ switch c {
+ case 'D':
+ mode |= GlobalDebug
+ case 'P':
+ mode |= PrintPackages
+ case 'F':
+ mode |= PrintFunctions
+ case 'S':
+ mode |= LogSource | BuildSerially
+ case 'C':
+ mode |= SanityCheckFunctions
+ case 'N':
+ mode |= NaiveForm
+ case 'L':
+ mode |= BuildSerially
+ default:
+ return fmt.Errorf("unknown BuilderMode option: %q", c)
+ }
+ }
+ *m = mode
+ return nil
+}
+
+// Get returns m.
+func (m BuilderMode) Get() interface{} { return m }
diff --git a/vendor/honnef.co/go/tools/ssa/print.go b/vendor/honnef.co/go/tools/ssa/print.go
new file mode 100644
index 0000000..a7deb88
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/print.go
@@ -0,0 +1,433 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// This file implements the String() methods for all Value and
+// Instruction types.
+
+import (
+ "bytes"
+ "fmt"
+ "go/types"
+ "io"
+ "reflect"
+ "sort"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+// relName returns the name of v relative to i.
+// In most cases, this is identical to v.Name(), but references to
+// Functions (including methods) and Globals use RelString and
+// all types are displayed with relType, so that only cross-package
+// references are package-qualified.
+//
+func relName(v Value, i Instruction) string {
+ var from *types.Package
+ if i != nil {
+ from = i.Parent().pkg()
+ }
+ switch v := v.(type) {
+ case Member: // *Function or *Global
+ return v.RelString(from)
+ case *Const:
+ return v.RelString(from)
+ }
+ return v.Name()
+}
+
+func relType(t types.Type, from *types.Package) string {
+ return types.TypeString(t, types.RelativeTo(from))
+}
+
+func relString(m Member, from *types.Package) string {
+ // NB: not all globals have an Object (e.g. init$guard),
+ // so use Package().Object not Object.Package().
+ if pkg := m.Package().Pkg; pkg != nil && pkg != from {
+ return fmt.Sprintf("%s.%s", pkg.Path(), m.Name())
+ }
+ return m.Name()
+}
+
+// Value.String()
+//
+// This method is provided only for debugging.
+// It never appears in disassembly, which uses Value.Name().
+
+func (v *Parameter) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from))
+}
+
+func (v *FreeVar) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from))
+}
+
+func (v *Builtin) String() string {
+ return fmt.Sprintf("builtin %s", v.Name())
+}
+
+// Instruction.String()
+
+func (v *Alloc) String() string {
+ op := "local"
+ if v.Heap {
+ op = "new"
+ }
+ from := v.Parent().pkg()
+ return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment)
+}
+
+func (v *Phi) String() string {
+ var b bytes.Buffer
+ b.WriteString("φ [")
+ for i, edge := range v.Edges {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ // Be robust against malformed CFG.
+ block := -1
+ if v.block != nil && i < len(v.block.Preds) {
+ block = v.block.Preds[i].Index
+ }
+ fmt.Fprintf(&b, "%d: ", block)
+ edgeVal := "<nil>" // be robust
+ if edge != nil {
+ edgeVal = relName(edge, v)
+ }
+ b.WriteString(edgeVal)
+ }
+ b.WriteString("]")
+ if v.Comment != "" {
+ b.WriteString(" #")
+ b.WriteString(v.Comment)
+ }
+ return b.String()
+}
+
+func printCall(v *CallCommon, prefix string, instr Instruction) string {
+ var b bytes.Buffer
+ b.WriteString(prefix)
+ if !v.IsInvoke() {
+ b.WriteString(relName(v.Value, instr))
+ } else {
+ fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name())
+ }
+ b.WriteString("(")
+ for i, arg := range v.Args {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ b.WriteString(relName(arg, instr))
+ }
+ if v.Signature().Variadic() {
+ b.WriteString("...")
+ }
+ b.WriteString(")")
+ return b.String()
+}
+
+func (c *CallCommon) String() string {
+ return printCall(c, "", nil)
+}
+
+func (v *Call) String() string {
+ return printCall(&v.Call, "", v)
+}
+
+func (v *BinOp) String() string {
+ return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v))
+}
+
+func (v *UnOp) String() string {
+ return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk))
+}
+
+func printConv(prefix string, v, x Value) string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("%s %s <- %s (%s)",
+ prefix,
+ relType(v.Type(), from),
+ relType(x.Type(), from),
+ relName(x, v.(Instruction)))
+}
+
+func (v *ChangeType) String() string { return printConv("changetype", v, v.X) }
+func (v *Convert) String() string { return printConv("convert", v, v.X) }
+func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) }
+func (v *MakeInterface) String() string { return printConv("make", v, v.X) }
+
+func (v *MakeClosure) String() string {
+ var b bytes.Buffer
+ fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v))
+ if v.Bindings != nil {
+ b.WriteString(" [")
+ for i, c := range v.Bindings {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ b.WriteString(relName(c, v))
+ }
+ b.WriteString("]")
+ }
+ return b.String()
+}
+
+func (v *MakeSlice) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("make %s %s %s",
+ relType(v.Type(), from),
+ relName(v.Len, v),
+ relName(v.Cap, v))
+}
+
+func (v *Slice) String() string {
+ var b bytes.Buffer
+ b.WriteString("slice ")
+ b.WriteString(relName(v.X, v))
+ b.WriteString("[")
+ if v.Low != nil {
+ b.WriteString(relName(v.Low, v))
+ }
+ b.WriteString(":")
+ if v.High != nil {
+ b.WriteString(relName(v.High, v))
+ }
+ if v.Max != nil {
+ b.WriteString(":")
+ b.WriteString(relName(v.Max, v))
+ }
+ b.WriteString("]")
+ return b.String()
+}
+
+func (v *MakeMap) String() string {
+ res := ""
+ if v.Reserve != nil {
+ res = relName(v.Reserve, v)
+ }
+ from := v.Parent().pkg()
+ return fmt.Sprintf("make %s %s", relType(v.Type(), from), res)
+}
+
+func (v *MakeChan) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v))
+}
+
+func (v *FieldAddr) String() string {
+ st := deref(v.X.Type()).Underlying().(*types.Struct)
+ // Be robust against a bad index.
+ name := "?"
+ if 0 <= v.Field && v.Field < st.NumFields() {
+ name = st.Field(v.Field).Name()
+ }
+ return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field)
+}
+
+func (v *Field) String() string {
+ st := v.X.Type().Underlying().(*types.Struct)
+ // Be robust against a bad index.
+ name := "?"
+ if 0 <= v.Field && v.Field < st.NumFields() {
+ name = st.Field(v.Field).Name()
+ }
+ return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field)
+}
+
+func (v *IndexAddr) String() string {
+ return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v))
+}
+
+func (v *Index) String() string {
+ return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v))
+}
+
+func (v *Lookup) String() string {
+ return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk))
+}
+
+func (v *Range) String() string {
+ return "range " + relName(v.X, v)
+}
+
+func (v *Next) String() string {
+ return "next " + relName(v.Iter, v)
+}
+
+func (v *TypeAssert) String() string {
+ from := v.Parent().pkg()
+ return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from))
+}
+
+func (v *Extract) String() string {
+ return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index)
+}
+
+func (s *Jump) String() string {
+ // Be robust against malformed CFG.
+ block := -1
+ if s.block != nil && len(s.block.Succs) == 1 {
+ block = s.block.Succs[0].Index
+ }
+ return fmt.Sprintf("jump %d", block)
+}
+
+func (s *If) String() string {
+ // Be robust against malformed CFG.
+ tblock, fblock := -1, -1
+ if s.block != nil && len(s.block.Succs) == 2 {
+ tblock = s.block.Succs[0].Index
+ fblock = s.block.Succs[1].Index
+ }
+ return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock)
+}
+
+func (s *Go) String() string {
+ return printCall(&s.Call, "go ", s)
+}
+
+func (s *Panic) String() string {
+ return "panic " + relName(s.X, s)
+}
+
+func (s *Return) String() string {
+ var b bytes.Buffer
+ b.WriteString("return")
+ for i, r := range s.Results {
+ if i == 0 {
+ b.WriteString(" ")
+ } else {
+ b.WriteString(", ")
+ }
+ b.WriteString(relName(r, s))
+ }
+ return b.String()
+}
+
+func (*RunDefers) String() string {
+ return "rundefers"
+}
+
+func (s *Send) String() string {
+ return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s))
+}
+
+func (s *Defer) String() string {
+ return printCall(&s.Call, "defer ", s)
+}
+
+func (s *Select) String() string {
+ var b bytes.Buffer
+ for i, st := range s.States {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ if st.Dir == types.RecvOnly {
+ b.WriteString("<-")
+ b.WriteString(relName(st.Chan, s))
+ } else {
+ b.WriteString(relName(st.Chan, s))
+ b.WriteString("<-")
+ b.WriteString(relName(st.Send, s))
+ }
+ }
+ non := ""
+ if !s.Blocking {
+ non = "non"
+ }
+ return fmt.Sprintf("select %sblocking [%s]", non, b.String())
+}
+
+func (s *Store) String() string {
+ return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s))
+}
+
+func (s *BlankStore) String() string {
+ return fmt.Sprintf("_ = %s", relName(s.Val, s))
+}
+
+func (s *MapUpdate) String() string {
+ return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s))
+}
+
+func (s *DebugRef) String() string {
+ p := s.Parent().Prog.Fset.Position(s.Pos())
+ var descr interface{}
+ if s.object != nil {
+ descr = s.object // e.g. "var x int"
+ } else {
+ descr = reflect.TypeOf(s.Expr) // e.g. "*ast.CallExpr"
+ }
+ var addr string
+ if s.IsAddr {
+ addr = "address of "
+ }
+ return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name())
+}
+
+func (p *Package) String() string {
+ return "package " + p.Pkg.Path()
+}
+
+var _ io.WriterTo = (*Package)(nil) // *Package implements io.Writer
+
+func (p *Package) WriteTo(w io.Writer) (int64, error) {
+ var buf bytes.Buffer
+ WritePackage(&buf, p)
+ n, err := w.Write(buf.Bytes())
+ return int64(n), err
+}
+
+// WritePackage writes to buf a human-readable summary of p.
+func WritePackage(buf *bytes.Buffer, p *Package) {
+ fmt.Fprintf(buf, "%s:\n", p)
+
+ var names []string
+ maxname := 0
+ for name := range p.Members {
+ if l := len(name); l > maxname {
+ maxname = l
+ }
+ names = append(names, name)
+ }
+
+ from := p.Pkg
+ sort.Strings(names)
+ for _, name := range names {
+ switch mem := p.Members[name].(type) {
+ case *NamedConst:
+ fmt.Fprintf(buf, " const %-*s %s = %s\n",
+ maxname, name, mem.Name(), mem.Value.RelString(from))
+
+ case *Function:
+ fmt.Fprintf(buf, " func %-*s %s\n",
+ maxname, name, relType(mem.Type(), from))
+
+ case *Type:
+ fmt.Fprintf(buf, " type %-*s %s\n",
+ maxname, name, relType(mem.Type().Underlying(), from))
+ for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) {
+ fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from)))
+ }
+
+ case *Global:
+ fmt.Fprintf(buf, " var %-*s %s\n",
+ maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from))
+ }
+ }
+
+ fmt.Fprintf(buf, "\n")
+}
+
+func commaOk(x bool) string {
+ if x {
+ return ",ok"
+ }
+ return ""
+}
diff --git a/vendor/honnef.co/go/tools/ssa/sanity.go b/vendor/honnef.co/go/tools/ssa/sanity.go
new file mode 100644
index 0000000..bd7377c
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/sanity.go
@@ -0,0 +1,523 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// An optional pass for sanity-checking invariants of the SSA representation.
+// Currently it checks CFG invariants but little at the instruction level.
+
+import (
+ "fmt"
+ "go/types"
+ "io"
+ "os"
+ "strings"
+)
+
+type sanity struct {
+ reporter io.Writer
+ fn *Function
+ block *BasicBlock
+ instrs map[Instruction]struct{}
+ insane bool
+}
+
+// sanityCheck performs integrity checking of the SSA representation
+// of the function fn and returns true if it was valid. Diagnostics
+// are written to reporter if non-nil, os.Stderr otherwise. Some
+// diagnostics are only warnings and do not imply a negative result.
+//
+// Sanity-checking is intended to facilitate the debugging of code
+// transformation passes.
+//
+func sanityCheck(fn *Function, reporter io.Writer) bool {
+ if reporter == nil {
+ reporter = os.Stderr
+ }
+ return (&sanity{reporter: reporter}).checkFunction(fn)
+}
+
+// mustSanityCheck is like sanityCheck but panics instead of returning
+// a negative result.
+//
+func mustSanityCheck(fn *Function, reporter io.Writer) {
+ if !sanityCheck(fn, reporter) {
+ fn.WriteTo(os.Stderr)
+ panic("SanityCheck failed")
+ }
+}
+
+func (s *sanity) diagnostic(prefix, format string, args ...interface{}) {
+ fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn)
+ if s.block != nil {
+ fmt.Fprintf(s.reporter, ", block %s", s.block)
+ }
+ io.WriteString(s.reporter, ": ")
+ fmt.Fprintf(s.reporter, format, args...)
+ io.WriteString(s.reporter, "\n")
+}
+
+func (s *sanity) errorf(format string, args ...interface{}) {
+ s.insane = true
+ s.diagnostic("Error", format, args...)
+}
+
+func (s *sanity) warnf(format string, args ...interface{}) {
+ s.diagnostic("Warning", format, args...)
+}
+
+// findDuplicate returns an arbitrary basic block that appeared more
+// than once in blocks, or nil if all were unique.
+func findDuplicate(blocks []*BasicBlock) *BasicBlock {
+ if len(blocks) < 2 {
+ return nil
+ }
+ if blocks[0] == blocks[1] {
+ return blocks[0]
+ }
+ // Slow path:
+ m := make(map[*BasicBlock]bool)
+ for _, b := range blocks {
+ if m[b] {
+ return b
+ }
+ m[b] = true
+ }
+ return nil
+}
+
+func (s *sanity) checkInstr(idx int, instr Instruction) {
+ switch instr := instr.(type) {
+ case *If, *Jump, *Return, *Panic:
+ s.errorf("control flow instruction not at end of block")
+ case *Phi:
+ if idx == 0 {
+ // It suffices to apply this check to just the first phi node.
+ if dup := findDuplicate(s.block.Preds); dup != nil {
+ s.errorf("phi node in block with duplicate predecessor %s", dup)
+ }
+ } else {
+ prev := s.block.Instrs[idx-1]
+ if _, ok := prev.(*Phi); !ok {
+ s.errorf("Phi instruction follows a non-Phi: %T", prev)
+ }
+ }
+ if ne, np := len(instr.Edges), len(s.block.Preds); ne != np {
+ s.errorf("phi node has %d edges but %d predecessors", ne, np)
+
+ } else {
+ for i, e := range instr.Edges {
+ if e == nil {
+ s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i])
+ }
+ }
+ }
+
+ case *Alloc:
+ if !instr.Heap {
+ found := false
+ for _, l := range s.fn.Locals {
+ if l == instr {
+ found = true
+ break
+ }
+ }
+ if !found {
+ s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr)
+ }
+ }
+
+ case *BinOp:
+ case *Call:
+ case *ChangeInterface:
+ case *ChangeType:
+ case *Convert:
+ if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok {
+ if _, ok := instr.Type().Underlying().(*types.Basic); !ok {
+ s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type())
+ }
+ }
+
+ case *Defer:
+ case *Extract:
+ case *Field:
+ case *FieldAddr:
+ case *Go:
+ case *Index:
+ case *IndexAddr:
+ case *Lookup:
+ case *MakeChan:
+ case *MakeClosure:
+ numFree := len(instr.Fn.(*Function).FreeVars)
+ numBind := len(instr.Bindings)
+ if numFree != numBind {
+ s.errorf("MakeClosure has %d Bindings for function %s with %d free vars",
+ numBind, instr.Fn, numFree)
+
+ }
+ if recv := instr.Type().(*types.Signature).Recv(); recv != nil {
+ s.errorf("MakeClosure's type includes receiver %s", recv.Type())
+ }
+
+ case *MakeInterface:
+ case *MakeMap:
+ case *MakeSlice:
+ case *MapUpdate:
+ case *Next:
+ case *Range:
+ case *RunDefers:
+ case *Select:
+ case *Send:
+ case *Slice:
+ case *Store:
+ case *TypeAssert:
+ case *UnOp:
+ case *DebugRef:
+ case *BlankStore:
+ case *Sigma:
+ // TODO(adonovan): implement checks.
+ default:
+ panic(fmt.Sprintf("Unknown instruction type: %T", instr))
+ }
+
+ if call, ok := instr.(CallInstruction); ok {
+ if call.Common().Signature() == nil {
+ s.errorf("nil signature: %s", call)
+ }
+ }
+
+ // Check that value-defining instructions have valid types
+ // and a valid referrer list.
+ if v, ok := instr.(Value); ok {
+ t := v.Type()
+ if t == nil {
+ s.errorf("no type: %s = %s", v.Name(), v)
+ } else if t == tRangeIter {
+ // not a proper type; ignore.
+ } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 {
+ s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t)
+ }
+ s.checkReferrerList(v)
+ }
+
+ // Untyped constants are legal as instruction Operands(),
+ // for example:
+ // _ = "foo"[0]
+ // or:
+ // if wordsize==64 {...}
+
+ // All other non-Instruction Values can be found via their
+ // enclosing Function or Package.
+}
+
+func (s *sanity) checkFinalInstr(idx int, instr Instruction) {
+ switch instr := instr.(type) {
+ case *If:
+ if nsuccs := len(s.block.Succs); nsuccs != 2 {
+ s.errorf("If-terminated block has %d successors; expected 2", nsuccs)
+ return
+ }
+ if s.block.Succs[0] == s.block.Succs[1] {
+ s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0])
+ return
+ }
+
+ case *Jump:
+ if nsuccs := len(s.block.Succs); nsuccs != 1 {
+ s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs)
+ return
+ }
+
+ case *Return:
+ if nsuccs := len(s.block.Succs); nsuccs != 0 {
+ s.errorf("Return-terminated block has %d successors; expected none", nsuccs)
+ return
+ }
+ if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na {
+ s.errorf("%d-ary return in %d-ary function", na, nf)
+ }
+
+ case *Panic:
+ if nsuccs := len(s.block.Succs); nsuccs != 0 {
+ s.errorf("Panic-terminated block has %d successors; expected none", nsuccs)
+ return
+ }
+
+ default:
+ s.errorf("non-control flow instruction at end of block")
+ }
+}
+
+func (s *sanity) checkBlock(b *BasicBlock, index int) {
+ s.block = b
+
+ if b.Index != index {
+ s.errorf("block has incorrect Index %d", b.Index)
+ }
+ if b.parent != s.fn {
+ s.errorf("block has incorrect parent %s", b.parent)
+ }
+
+ // Check all blocks are reachable.
+ // (The entry block is always implicitly reachable,
+ // as is the Recover block, if any.)
+ if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 {
+ s.warnf("unreachable block")
+ if b.Instrs == nil {
+ // Since this block is about to be pruned,
+ // tolerating transient problems in it
+ // simplifies other optimizations.
+ return
+ }
+ }
+
+ // Check predecessor and successor relations are dual,
+ // and that all blocks in CFG belong to same function.
+ for _, a := range b.Preds {
+ found := false
+ for _, bb := range a.Succs {
+ if bb == b {
+ found = true
+ break
+ }
+ }
+ if !found {
+ s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs)
+ }
+ if a.parent != s.fn {
+ s.errorf("predecessor %s belongs to different function %s", a, a.parent)
+ }
+ }
+ for _, c := range b.Succs {
+ found := false
+ for _, bb := range c.Preds {
+ if bb == b {
+ found = true
+ break
+ }
+ }
+ if !found {
+ s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds)
+ }
+ if c.parent != s.fn {
+ s.errorf("successor %s belongs to different function %s", c, c.parent)
+ }
+ }
+
+ // Check each instruction is sane.
+ n := len(b.Instrs)
+ if n == 0 {
+ s.errorf("basic block contains no instructions")
+ }
+ var rands [10]*Value // reuse storage
+ for j, instr := range b.Instrs {
+ if instr == nil {
+ s.errorf("nil instruction at index %d", j)
+ continue
+ }
+ if b2 := instr.Block(); b2 == nil {
+ s.errorf("nil Block() for instruction at index %d", j)
+ continue
+ } else if b2 != b {
+ s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j)
+ continue
+ }
+ if j < n-1 {
+ s.checkInstr(j, instr)
+ } else {
+ s.checkFinalInstr(j, instr)
+ }
+
+ // Check Instruction.Operands.
+ operands:
+ for i, op := range instr.Operands(rands[:0]) {
+ if op == nil {
+ s.errorf("nil operand pointer %d of %s", i, instr)
+ continue
+ }
+ val := *op
+ if val == nil {
+ continue // a nil operand is ok
+ }
+
+ // Check that "untyped" types only appear on constant operands.
+ if _, ok := (*op).(*Const); !ok {
+ if basic, ok := (*op).Type().(*types.Basic); ok {
+ if basic.Info()&types.IsUntyped != 0 {
+ s.errorf("operand #%d of %s is untyped: %s", i, instr, basic)
+ }
+ }
+ }
+
+ // Check that Operands that are also Instructions belong to same function.
+ // TODO(adonovan): also check their block dominates block b.
+ if val, ok := val.(Instruction); ok {
+ if val.Parent() != s.fn {
+ s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent())
+ }
+ }
+
+ // Check that each function-local operand of
+ // instr refers back to instr. (NB: quadratic)
+ switch val := val.(type) {
+ case *Const, *Global, *Builtin:
+ continue // not local
+ case *Function:
+ if val.parent == nil {
+ continue // only anon functions are local
+ }
+ }
+
+ // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined.
+
+ if refs := val.Referrers(); refs != nil {
+ for _, ref := range *refs {
+ if ref == instr {
+ continue operands
+ }
+ }
+ s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val)
+ } else {
+ s.errorf("operand %d of %s (%s) has no referrers", i, instr, val)
+ }
+ }
+ }
+}
+
+func (s *sanity) checkReferrerList(v Value) {
+ refs := v.Referrers()
+ if refs == nil {
+ s.errorf("%s has missing referrer list", v.Name())
+ return
+ }
+ for i, ref := range *refs {
+ if _, ok := s.instrs[ref]; !ok {
+ s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref)
+ }
+ }
+}
+
+func (s *sanity) checkFunction(fn *Function) bool {
+ // TODO(adonovan): check Function invariants:
+ // - check params match signature
+ // - check transient fields are nil
+ // - warn if any fn.Locals do not appear among block instructions.
+ s.fn = fn
+ if fn.Prog == nil {
+ s.errorf("nil Prog")
+ }
+
+ fn.String() // must not crash
+ fn.RelString(fn.pkg()) // must not crash
+
+ // All functions have a package, except delegates (which are
+ // shared across packages, or duplicated as weak symbols in a
+ // separate-compilation model), and error.Error.
+ if fn.Pkg == nil {
+ if strings.HasPrefix(fn.Synthetic, "wrapper ") ||
+ strings.HasPrefix(fn.Synthetic, "bound ") ||
+ strings.HasPrefix(fn.Synthetic, "thunk ") ||
+ strings.HasSuffix(fn.name, "Error") {
+ // ok
+ } else {
+ s.errorf("nil Pkg")
+ }
+ }
+ if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn {
+ s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn)
+ }
+ for i, l := range fn.Locals {
+ if l.Parent() != fn {
+ s.errorf("Local %s at index %d has wrong parent", l.Name(), i)
+ }
+ if l.Heap {
+ s.errorf("Local %s at index %d has Heap flag set", l.Name(), i)
+ }
+ }
+ // Build the set of valid referrers.
+ s.instrs = make(map[Instruction]struct{})
+ for _, b := range fn.Blocks {
+ for _, instr := range b.Instrs {
+ s.instrs[instr] = struct{}{}
+ }
+ }
+ for i, p := range fn.Params {
+ if p.Parent() != fn {
+ s.errorf("Param %s at index %d has wrong parent", p.Name(), i)
+ }
+ s.checkReferrerList(p)
+ }
+ for i, fv := range fn.FreeVars {
+ if fv.Parent() != fn {
+ s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i)
+ }
+ s.checkReferrerList(fv)
+ }
+
+ if fn.Blocks != nil && len(fn.Blocks) == 0 {
+ // Function _had_ blocks (so it's not external) but
+ // they were "optimized" away, even the entry block.
+ s.errorf("Blocks slice is non-nil but empty")
+ }
+ for i, b := range fn.Blocks {
+ if b == nil {
+ s.warnf("nil *BasicBlock at f.Blocks[%d]", i)
+ continue
+ }
+ s.checkBlock(b, i)
+ }
+ if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover {
+ s.errorf("Recover block is not in Blocks slice")
+ }
+
+ s.block = nil
+ for i, anon := range fn.AnonFuncs {
+ if anon.Parent() != fn {
+ s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent())
+ }
+ }
+ s.fn = nil
+ return !s.insane
+}
+
+// sanityCheckPackage checks invariants of packages upon creation.
+// It does not require that the package is built.
+// Unlike sanityCheck (for functions), it just panics at the first error.
+func sanityCheckPackage(pkg *Package) {
+ if pkg.Pkg == nil {
+ panic(fmt.Sprintf("Package %s has no Object", pkg))
+ }
+ pkg.String() // must not crash
+
+ for name, mem := range pkg.Members {
+ if name != mem.Name() {
+ panic(fmt.Sprintf("%s: %T.Name() = %s, want %s",
+ pkg.Pkg.Path(), mem, mem.Name(), name))
+ }
+ obj := mem.Object()
+ if obj == nil {
+ // This check is sound because fields
+ // {Global,Function}.object have type
+ // types.Object. (If they were declared as
+ // *types.{Var,Func}, we'd have a non-empty
+ // interface containing a nil pointer.)
+
+ continue // not all members have typechecker objects
+ }
+ if obj.Name() != name {
+ if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") {
+ // Ok. The name of a declared init function varies between
+ // its types.Func ("init") and its ssa.Function ("init#%d").
+ } else {
+ panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s",
+ pkg.Pkg.Path(), mem, obj.Name(), name))
+ }
+ }
+ if obj.Pos() != mem.Pos() {
+ panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos()))
+ }
+ }
+}
diff --git a/vendor/honnef.co/go/tools/ssa/source.go b/vendor/honnef.co/go/tools/ssa/source.go
new file mode 100644
index 0000000..e17e023
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/source.go
@@ -0,0 +1,299 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// This file defines utilities for working with source positions
+// or source-level named entities ("objects").
+
+// TODO(adonovan): test that {Value,Instruction}.Pos() positions match
+// the originating syntax, as specified.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+ "log"
+)
+
+// EnclosingFunction returns the function that contains the syntax
+// node denoted by path.
+//
+// Syntax associated with package-level variable specifications is
+// enclosed by the package's init() function.
+//
+// Returns nil if not found; reasons might include:
+// - the node is not enclosed by any function.
+// - the node is within an anonymous function (FuncLit) and
+// its SSA function has not been created yet
+// (pkg.Build() has not yet been called).
+//
+func EnclosingFunction(pkg *Package, path []ast.Node) *Function {
+ // Start with package-level function...
+ fn := findEnclosingPackageLevelFunction(pkg, path)
+ if fn == nil {
+ return nil // not in any function
+ }
+
+ // ...then walk down the nested anonymous functions.
+ n := len(path)
+outer:
+ for i := range path {
+ if lit, ok := path[n-1-i].(*ast.FuncLit); ok {
+ for _, anon := range fn.AnonFuncs {
+ if anon.Pos() == lit.Type.Func {
+ fn = anon
+ continue outer
+ }
+ }
+ // SSA function not found:
+ // - package not yet built, or maybe
+ // - builder skipped FuncLit in dead block
+ // (in principle; but currently the Builder
+ // generates even dead FuncLits).
+ return nil
+ }
+ }
+ return fn
+}
+
+// HasEnclosingFunction returns true if the AST node denoted by path
+// is contained within the declaration of some function or
+// package-level variable.
+//
+// Unlike EnclosingFunction, the behaviour of this function does not
+// depend on whether SSA code for pkg has been built, so it can be
+// used to quickly reject check inputs that will cause
+// EnclosingFunction to fail, prior to SSA building.
+//
+func HasEnclosingFunction(pkg *Package, path []ast.Node) bool {
+ return findEnclosingPackageLevelFunction(pkg, path) != nil
+}
+
+// findEnclosingPackageLevelFunction returns the Function
+// corresponding to the package-level function enclosing path.
+//
+func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function {
+ if n := len(path); n >= 2 { // [... {Gen,Func}Decl File]
+ switch decl := path[n-2].(type) {
+ case *ast.GenDecl:
+ if decl.Tok == token.VAR && n >= 3 {
+ // Package-level 'var' initializer.
+ return pkg.init
+ }
+
+ case *ast.FuncDecl:
+ if decl.Recv == nil && decl.Name.Name == "init" {
+ // Explicit init() function.
+ for _, b := range pkg.init.Blocks {
+ for _, instr := range b.Instrs {
+ if instr, ok := instr.(*Call); ok {
+ if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos {
+ return callee
+ }
+ }
+ }
+ }
+ // Hack: return non-nil when SSA is not yet
+ // built so that HasEnclosingFunction works.
+ return pkg.init
+ }
+ // Declared function/method.
+ return findNamedFunc(pkg, decl.Name.NamePos)
+ }
+ }
+ return nil // not in any function
+}
+
+// findNamedFunc returns the named function whose FuncDecl.Ident is at
+// position pos.
+//
+func findNamedFunc(pkg *Package, pos token.Pos) *Function {
+ // Look at all package members and method sets of named types.
+ // Not very efficient.
+ for _, mem := range pkg.Members {
+ switch mem := mem.(type) {
+ case *Function:
+ if mem.Pos() == pos {
+ return mem
+ }
+ case *Type:
+ mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type()))
+ for i, n := 0, mset.Len(); i < n; i++ {
+ // Don't call Program.Method: avoid creating wrappers.
+ obj := mset.At(i).Obj().(*types.Func)
+ if obj.Pos() == pos {
+ if pkg.values[obj] == nil {
+ log.Println(obj)
+ }
+ return pkg.values[obj].(*Function)
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// ValueForExpr returns the SSA Value that corresponds to non-constant
+// expression e.
+//
+// It returns nil if no value was found, e.g.
+// - the expression is not lexically contained within f;
+// - f was not built with debug information; or
+// - e is a constant expression. (For efficiency, no debug
+// information is stored for constants. Use
+// go/types.Info.Types[e].Value instead.)
+// - e is a reference to nil or a built-in function.
+// - the value was optimised away.
+//
+// If e is an addressable expression used in an lvalue context,
+// value is the address denoted by e, and isAddr is true.
+//
+// The types of e (or &e, if isAddr) and the result are equal
+// (modulo "untyped" bools resulting from comparisons).
+//
+// (Tip: to find the ssa.Value given a source position, use
+// importer.PathEnclosingInterval to locate the ast.Node, then
+// EnclosingFunction to locate the Function, then ValueForExpr to find
+// the ssa.Value.)
+//
+func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) {
+ if f.debugInfo() { // (opt)
+ e = unparen(e)
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ if ref, ok := instr.(*DebugRef); ok {
+ if ref.Expr == e {
+ return ref.X, ref.IsAddr
+ }
+ }
+ }
+ }
+ }
+ return
+}
+
+// --- Lookup functions for source-level named entities (types.Objects) ---
+
+// Package returns the SSA Package corresponding to the specified
+// type-checker package object.
+// It returns nil if no such SSA package has been created.
+//
+func (prog *Program) Package(obj *types.Package) *Package {
+ return prog.packages[obj]
+}
+
+// packageLevelValue returns the package-level value corresponding to
+// the specified named object, which may be a package-level const
+// (*Const), var (*Global) or func (*Function) of some package in
+// prog. It returns nil if the object is not found.
+//
+func (prog *Program) packageLevelValue(obj types.Object) Value {
+ if pkg, ok := prog.packages[obj.Pkg()]; ok {
+ return pkg.values[obj]
+ }
+ return nil
+}
+
+// FuncValue returns the concrete Function denoted by the source-level
+// named function obj, or nil if obj denotes an interface method.
+//
+// TODO(adonovan): check the invariant that obj.Type() matches the
+// result's Signature, both in the params/results and in the receiver.
+//
+func (prog *Program) FuncValue(obj *types.Func) *Function {
+ fn, _ := prog.packageLevelValue(obj).(*Function)
+ return fn
+}
+
+// ConstValue returns the SSA Value denoted by the source-level named
+// constant obj.
+//
+func (prog *Program) ConstValue(obj *types.Const) *Const {
+ // TODO(adonovan): opt: share (don't reallocate)
+ // Consts for const objects and constant ast.Exprs.
+
+ // Universal constant? {true,false,nil}
+ if obj.Parent() == types.Universe {
+ return NewConst(obj.Val(), obj.Type())
+ }
+ // Package-level named constant?
+ if v := prog.packageLevelValue(obj); v != nil {
+ return v.(*Const)
+ }
+ return NewConst(obj.Val(), obj.Type())
+}
+
+// VarValue returns the SSA Value that corresponds to a specific
+// identifier denoting the source-level named variable obj.
+//
+// VarValue returns nil if a local variable was not found, perhaps
+// because its package was not built, the debug information was not
+// requested during SSA construction, or the value was optimized away.
+//
+// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval),
+// and that ident must resolve to obj.
+//
+// pkg is the package enclosing the reference. (A reference to a var
+// always occurs within a function, so we need to know where to find it.)
+//
+// If the identifier is a field selector and its base expression is
+// non-addressable, then VarValue returns the value of that field.
+// For example:
+// func f() struct {x int}
+// f().x // VarValue(x) returns a *Field instruction of type int
+//
+// All other identifiers denote addressable locations (variables).
+// For them, VarValue may return either the variable's address or its
+// value, even when the expression is evaluated only for its value; the
+// situation is reported by isAddr, the second component of the result.
+//
+// If !isAddr, the returned value is the one associated with the
+// specific identifier. For example,
+// var x int // VarValue(x) returns Const 0 here
+// x = 1 // VarValue(x) returns Const 1 here
+//
+// It is not specified whether the value or the address is returned in
+// any particular case, as it may depend upon optimizations performed
+// during SSA code generation, such as registerization, constant
+// folding, avoidance of materialization of subexpressions, etc.
+//
+func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) {
+ // All references to a var are local to some function, possibly init.
+ fn := EnclosingFunction(pkg, ref)
+ if fn == nil {
+ return // e.g. def of struct field; SSA not built?
+ }
+
+ id := ref[0].(*ast.Ident)
+
+ // Defining ident of a parameter?
+ if id.Pos() == obj.Pos() {
+ for _, param := range fn.Params {
+ if param.Object() == obj {
+ return param, false
+ }
+ }
+ }
+
+ // Other ident?
+ for _, b := range fn.Blocks {
+ for _, instr := range b.Instrs {
+ if dr, ok := instr.(*DebugRef); ok {
+ if dr.Pos() == id.Pos() {
+ return dr.X, dr.IsAddr
+ }
+ }
+ }
+ }
+
+ // Defining ident of package-level var?
+ if v := prog.packageLevelValue(obj); v != nil {
+ return v.(*Global), true
+ }
+
+ return // e.g. debug info not requested, or var optimized away
+}
diff --git a/vendor/honnef.co/go/tools/ssa/ssa.go b/vendor/honnef.co/go/tools/ssa/ssa.go
new file mode 100644
index 0000000..ed280c7
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/ssa.go
@@ -0,0 +1,1751 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// This package defines a high-level intermediate representation for
+// Go programs using static single-assignment (SSA) form.
+
+import (
+ "fmt"
+ "go/ast"
+ exact "go/constant"
+ "go/token"
+ "go/types"
+ "sync"
+
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+// A Program is a partial or complete Go program converted to SSA form.
+type Program struct {
+ Fset *token.FileSet // position information for the files of this Program
+ imported map[string]*Package // all importable Packages, keyed by import path
+ packages map[*types.Package]*Package // all loaded Packages, keyed by object
+ mode BuilderMode // set of mode bits for SSA construction
+ MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets
+
+ methodsMu sync.Mutex // guards the following maps:
+ methodSets typeutil.Map // maps type to its concrete methodSet
+ runtimeTypes typeutil.Map // types for which rtypes are needed
+ canon typeutil.Map // type canonicalization map
+ bounds map[*types.Func]*Function // bounds for curried x.Method closures
+ thunks map[selectionKey]*Function // thunks for T.Method expressions
+}
+
+// A Package is a single analyzed Go package containing Members for
+// all package-level functions, variables, constants and types it
+// declares. These may be accessed directly via Members, or via the
+// type-specific accessor methods Func, Type, Var and Const.
+//
+// Members also contains entries for "init" (the synthetic package
+// initializer) and "init#%d", the nth declared init function,
+// and unspecified other things too.
+//
+type Package struct {
+ Prog *Program // the owning program
+ Pkg *types.Package // the corresponding go/types.Package
+ Members map[string]Member // all package members keyed by name (incl. init and init#%d)
+ values map[types.Object]Value // package members (incl. types and methods), keyed by object
+ init *Function // Func("init"); the package's init function
+ debug bool // include full debug info in this package
+
+ // The following fields are set transiently, then cleared
+ // after building.
+ buildOnce sync.Once // ensures package building occurs once
+ ninit int32 // number of init functions
+ info *types.Info // package type information
+ files []*ast.File // package ASTs
+}
+
+// A Member is a member of a Go package, implemented by *NamedConst,
+// *Global, *Function, or *Type; they are created by package-level
+// const, var, func and type declarations respectively.
+//
+type Member interface {
+ Name() string // declared name of the package member
+ String() string // package-qualified name of the package member
+ RelString(*types.Package) string // like String, but relative refs are unqualified
+ Object() types.Object // typechecker's object for this member, if any
+ Pos() token.Pos // position of member's declaration, if known
+ Type() types.Type // type of the package member
+ Token() token.Token // token.{VAR,FUNC,CONST,TYPE}
+ Package() *Package // the containing package
+}
+
+// A Type is a Member of a Package representing a package-level named type.
+//
+// Type() returns a *types.Named.
+//
+type Type struct {
+ object *types.TypeName
+ pkg *Package
+}
+
+// A NamedConst is a Member of a Package representing a package-level
+// named constant.
+//
+// Pos() returns the position of the declaring ast.ValueSpec.Names[*]
+// identifier.
+//
+// NB: a NamedConst is not a Value; it contains a constant Value, which
+// it augments with the name and position of its 'const' declaration.
+//
+type NamedConst struct {
+ object *types.Const
+ Value *Const
+ pos token.Pos
+ pkg *Package
+}
+
+// A Value is an SSA value that can be referenced by an instruction.
+type Value interface {
+ // Name returns the name of this value, and determines how
+ // this Value appears when used as an operand of an
+ // Instruction.
+ //
+ // This is the same as the source name for Parameters,
+ // Builtins, Functions, FreeVars, Globals.
+ // For constants, it is a representation of the constant's value
+ // and type. For all other Values this is the name of the
+ // virtual register defined by the instruction.
+ //
+ // The name of an SSA Value is not semantically significant,
+ // and may not even be unique within a function.
+ Name() string
+
+ // If this value is an Instruction, String returns its
+ // disassembled form; otherwise it returns unspecified
+ // human-readable information about the Value, such as its
+ // kind, name and type.
+ String() string
+
+ // Type returns the type of this value. Many instructions
+ // (e.g. IndexAddr) change their behaviour depending on the
+ // types of their operands.
+ Type() types.Type
+
+ // Parent returns the function to which this Value belongs.
+ // It returns nil for named Functions, Builtin, Const and Global.
+ Parent() *Function
+
+ // Referrers returns the list of instructions that have this
+ // value as one of their operands; it may contain duplicates
+ // if an instruction has a repeated operand.
+ //
+ // Referrers actually returns a pointer through which the
+ // caller may perform mutations to the object's state.
+ //
+ // Referrers is currently only defined if Parent()!=nil,
+ // i.e. for the function-local values FreeVar, Parameter,
+ // Functions (iff anonymous) and all value-defining instructions.
+ // It returns nil for named Functions, Builtin, Const and Global.
+ //
+ // Instruction.Operands contains the inverse of this relation.
+ Referrers() *[]Instruction
+
+ // Pos returns the location of the AST token most closely
+ // associated with the operation that gave rise to this value,
+ // or token.NoPos if it was not explicit in the source.
+ //
+ // For each ast.Node type, a particular token is designated as
+ // the closest location for the expression, e.g. the Lparen
+ // for an *ast.CallExpr. This permits a compact but
+ // approximate mapping from Values to source positions for use
+ // in diagnostic messages, for example.
+ //
+ // (Do not use this position to determine which Value
+ // corresponds to an ast.Expr; use Function.ValueForExpr
+ // instead. NB: it requires that the function was built with
+ // debug information.)
+ Pos() token.Pos
+}
+
+// An Instruction is an SSA instruction that computes a new Value or
+// has some effect.
+//
+// An Instruction that defines a value (e.g. BinOp) also implements
+// the Value interface; an Instruction that only has an effect (e.g. Store)
+// does not.
+//
+type Instruction interface {
+ // String returns the disassembled form of this value.
+ //
+ // Examples of Instructions that are Values:
+ // "x + y" (BinOp)
+ // "len([])" (Call)
+ // Note that the name of the Value is not printed.
+ //
+ // Examples of Instructions that are not Values:
+ // "return x" (Return)
+ // "*y = x" (Store)
+ //
+ // (The separation Value.Name() from Value.String() is useful
+ // for some analyses which distinguish the operation from the
+ // value it defines, e.g., 'y = local int' is both an allocation
+ // of memory 'local int' and a definition of a pointer y.)
+ String() string
+
+ // Parent returns the function to which this instruction
+ // belongs.
+ Parent() *Function
+
+ // Block returns the basic block to which this instruction
+ // belongs.
+ Block() *BasicBlock
+
+ // setBlock sets the basic block to which this instruction belongs.
+ setBlock(*BasicBlock)
+
+ // Operands returns the operands of this instruction: the
+ // set of Values it references.
+ //
+ // Specifically, it appends their addresses to rands, a
+ // user-provided slice, and returns the resulting slice,
+ // permitting avoidance of memory allocation.
+ //
+ // The operands are appended in undefined order, but the order
+ // is consistent for a given Instruction; the addresses are
+ // always non-nil but may point to a nil Value. Clients may
+ // store through the pointers, e.g. to effect a value
+ // renaming.
+ //
+ // Value.Referrers is a subset of the inverse of this
+ // relation. (Referrers are not tracked for all types of
+ // Values.)
+ Operands(rands []*Value) []*Value
+
+ // Pos returns the location of the AST token most closely
+ // associated with the operation that gave rise to this
+ // instruction, or token.NoPos if it was not explicit in the
+ // source.
+ //
+ // For each ast.Node type, a particular token is designated as
+ // the closest location for the expression, e.g. the Go token
+ // for an *ast.GoStmt. This permits a compact but approximate
+ // mapping from Instructions to source positions for use in
+ // diagnostic messages, for example.
+ //
+ // (Do not use this position to determine which Instruction
+ // corresponds to an ast.Expr; see the notes for Value.Pos.
+ // This position may be used to determine which non-Value
+ // Instruction corresponds to some ast.Stmts, but not all: If
+ // and Jump instructions have no Pos(), for example.)
+ Pos() token.Pos
+}
+
+// A Node is a node in the SSA value graph. Every concrete type that
+// implements Node is also either a Value, an Instruction, or both.
+//
+// Node contains the methods common to Value and Instruction, plus the
+// Operands and Referrers methods generalized to return nil for
+// non-Instructions and non-Values, respectively.
+//
+// Node is provided to simplify SSA graph algorithms. Clients should
+// use the more specific and informative Value or Instruction
+// interfaces where appropriate.
+//
+type Node interface {
+ // Common methods:
+ String() string
+ Pos() token.Pos
+ Parent() *Function
+
+ // Partial methods:
+ Operands(rands []*Value) []*Value // nil for non-Instructions
+ Referrers() *[]Instruction // nil for non-Values
+}
+
+// Function represents the parameters, results, and code of a function
+// or method.
+//
+// If Blocks is nil, this indicates an external function for which no
+// Go source code is available. In this case, FreeVars and Locals
+// are nil too. Clients performing whole-program analysis must
+// handle external functions specially.
+//
+// Blocks contains the function's control-flow graph (CFG).
+// Blocks[0] is the function entry point; block order is not otherwise
+// semantically significant, though it may affect the readability of
+// the disassembly.
+// To iterate over the blocks in dominance order, use DomPreorder().
+//
+// Recover is an optional second entry point to which control resumes
+// after a recovered panic. The Recover block may contain only a return
+// statement, preceded by a load of the function's named return
+// parameters, if any.
+//
+// A nested function (Parent()!=nil) that refers to one or more
+// lexically enclosing local variables ("free variables") has FreeVars.
+// Such functions cannot be called directly but require a
+// value created by MakeClosure which, via its Bindings, supplies
+// values for these parameters.
+//
+// If the function is a method (Signature.Recv() != nil) then the first
+// element of Params is the receiver parameter.
+//
+// A Go package may declare many functions called "init".
+// For each one, Object().Name() returns "init" but Name() returns
+// "init#1", etc, in declaration order.
+//
+// Pos() returns the declaring ast.FuncLit.Type.Func or the position
+// of the ast.FuncDecl.Name, if the function was explicit in the
+// source. Synthetic wrappers, for which Synthetic != "", may share
+// the same position as the function they wrap.
+// Syntax.Pos() always returns the position of the declaring "func" token.
+//
+// Type() returns the function's Signature.
+//
+type Function struct {
+ name string
+ object types.Object // a declared *types.Func or one of its wrappers
+ method *types.Selection // info about provenance of synthetic methods
+ Signature *types.Signature
+ pos token.Pos
+
+ Synthetic string // provenance of synthetic function; "" for true source functions
+ syntax ast.Node // *ast.Func{Decl,Lit}; replaced with simple ast.Node after build, unless debug mode
+ parent *Function // enclosing function if anon; nil if global
+ Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error)
+ Prog *Program // enclosing program
+ Params []*Parameter // function parameters; for methods, includes receiver
+ FreeVars []*FreeVar // free variables whose values must be supplied by closure
+ Locals []*Alloc // local variables of this function
+ Blocks []*BasicBlock // basic blocks of the function; nil => external
+ Recover *BasicBlock // optional; control transfers here after recovered panic
+ AnonFuncs []*Function // anonymous functions directly beneath this one
+ referrers []Instruction // referring instructions (iff Parent() != nil)
+
+ // The following fields are set transiently during building,
+ // then cleared.
+ currentBlock *BasicBlock // where to emit code
+ objects map[types.Object]Value // addresses of local variables
+ namedResults []*Alloc // tuple of named results
+ targets *targets // linked stack of branch targets
+ lblocks map[*ast.Object]*lblock // labelled blocks
+}
+
+// BasicBlock represents an SSA basic block.
+//
+// The final element of Instrs is always an explicit transfer of
+// control (If, Jump, Return, or Panic).
+//
+// A block may contain no Instructions only if it is unreachable,
+// i.e., Preds is nil. Empty blocks are typically pruned.
+//
+// BasicBlocks and their Preds/Succs relation form a (possibly cyclic)
+// graph independent of the SSA Value graph: the control-flow graph or
+// CFG. It is illegal for multiple edges to exist between the same
+// pair of blocks.
+//
+// Each BasicBlock is also a node in the dominator tree of the CFG.
+// The tree may be navigated using Idom()/Dominees() and queried using
+// Dominates().
+//
+// The order of Preds and Succs is significant (to Phi and If
+// instructions, respectively).
+//
+type BasicBlock struct {
+ Index int // index of this block within Parent().Blocks
+ Comment string // optional label; no semantic significance
+ parent *Function // parent function
+ Instrs []Instruction // instructions in order
+ Preds, Succs []*BasicBlock // predecessors and successors
+ succs2 [2]*BasicBlock // initial space for Succs
+ dom domInfo // dominator tree info
+ gaps int // number of nil Instrs (transient)
+ rundefers int // number of rundefers (transient)
+}
+
+// Pure values ----------------------------------------
+
+// A FreeVar represents a free variable of the function to which it
+// belongs.
+//
+// FreeVars are used to implement anonymous functions, whose free
+// variables are lexically captured in a closure formed by
+// MakeClosure. The value of such a free var is an Alloc or another
+// FreeVar and is considered a potentially escaping heap address, with
+// pointer type.
+//
+// FreeVars are also used to implement bound method closures. Such a
+// free var represents the receiver value and may be of any type that
+// has concrete methods.
+//
+// Pos() returns the position of the value that was captured, which
+// belongs to an enclosing function.
+//
+type FreeVar struct {
+ name string
+ typ types.Type
+ pos token.Pos
+ parent *Function
+ referrers []Instruction
+
+ // Transiently needed during building.
+ outer Value // the Value captured from the enclosing context.
+}
+
+// A Parameter represents an input parameter of a function.
+//
+type Parameter struct {
+ name string
+ object types.Object // a *types.Var; nil for non-source locals
+ typ types.Type
+ pos token.Pos
+ parent *Function
+ referrers []Instruction
+}
+
+// A Const represents the value of a constant expression.
+//
+// The underlying type of a constant may be any boolean, numeric, or
+// string type. In addition, a Const may represent the nil value of
+// any reference type---interface, map, channel, pointer, slice, or
+// function---but not "untyped nil".
+//
+// All source-level constant expressions are represented by a Const
+// of the same type and value.
+//
+// Value holds the exact value of the constant, independent of its
+// Type(), using the same representation as package go/exact uses for
+// constants, or nil for a typed nil value.
+//
+// Pos() returns token.NoPos.
+//
+// Example printed form:
+// 42:int
+// "hello":untyped string
+// 3+4i:MyComplex
+//
+type Const struct {
+ typ types.Type
+ Value exact.Value
+}
+
+// A Global is a named Value holding the address of a package-level
+// variable.
+//
+// Pos() returns the position of the ast.ValueSpec.Names[*]
+// identifier.
+//
+type Global struct {
+ name string
+ object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard
+ typ types.Type
+ pos token.Pos
+
+ Pkg *Package
+}
+
+// A Builtin represents a specific use of a built-in function, e.g. len.
+//
+// Builtins are immutable values. Builtins do not have addresses.
+// Builtins can only appear in CallCommon.Func.
+//
+// Name() indicates the function: one of the built-in functions from the
+// Go spec (excluding "make" and "new") or one of these ssa-defined
+// intrinsics:
+//
+// // wrapnilchk returns ptr if non-nil, panics otherwise.
+// // (For use in indirection wrappers.)
+// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T
+//
+// Object() returns a *types.Builtin for built-ins defined by the spec,
+// nil for others.
+//
+// Type() returns a *types.Signature representing the effective
+// signature of the built-in for this call.
+//
+type Builtin struct {
+ name string
+ sig *types.Signature
+}
+
+// Value-defining instructions ----------------------------------------
+
+// The Alloc instruction reserves space for a variable of the given type,
+// zero-initializes it, and yields its address.
+//
+// Alloc values are always addresses, and have pointer types, so the
+// type of the allocated variable is actually
+// Type().Underlying().(*types.Pointer).Elem().
+//
+// If Heap is false, Alloc allocates space in the function's
+// activation record (frame); we refer to an Alloc(Heap=false) as a
+// "local" alloc. Each local Alloc returns the same address each time
+// it is executed within the same activation; the space is
+// re-initialized to zero.
+//
+// If Heap is true, Alloc allocates space in the heap; we
+// refer to an Alloc(Heap=true) as a "new" alloc. Each new Alloc
+// returns a different address each time it is executed.
+//
+// When Alloc is applied to a channel, map or slice type, it returns
+// the address of an uninitialized (nil) reference of that kind; store
+// the result of MakeSlice, MakeMap or MakeChan in that location to
+// instantiate these types.
+//
+// Pos() returns the ast.CompositeLit.Lbrace for a composite literal,
+// or the ast.CallExpr.Rparen for a call to new() or for a call that
+// allocates a varargs slice.
+//
+// Example printed form:
+// t0 = local int
+// t1 = new int
+//
+type Alloc struct {
+ register
+ Comment string
+ Heap bool
+ index int // dense numbering; for lifting
+}
+
+var _ Instruction = (*Sigma)(nil)
+var _ Value = (*Sigma)(nil)
+
+// A Sigma is an SSA σ-node, an extension of classic SSA form used by
+// this fork of go/tools ssa. It wraps value X with a Branch flag;
+// NOTE(review): Branch presumably records which successor edge (true
+// or false) of a conditional the renamed value flows along — confirm
+// against the builder before relying on this.
+type Sigma struct {
+	register
+	X      Value
+	Branch bool
+}
+
+// Value returns the root of the sigma chain: it follows X through any
+// nesting of *Sigma wrappers and returns the first non-Sigma value.
+func (p *Sigma) Value() Value {
+	v := p.X
+	for {
+		sigma, ok := v.(*Sigma)
+		if !ok {
+			break
+		}
+		// Advance to the wrapped value. (The previous code did
+		// `v = sigma`, which left v unchanged and therefore looped
+		// forever whenever p.X was itself a *Sigma.)
+		v = sigma.X
+	}
+	return v
+}
+
+// String returns the disassembled form of the σ-node,
+// e.g. "σ [t0.true]".
+func (p *Sigma) String() string {
+	return fmt.Sprintf("σ [%s.%t]", relName(p.X, p), p.Branch)
+}
+
+// The Phi instruction represents an SSA φ-node, which combines values
+// that differ across incoming control-flow edges and yields a new
+// value. Within a block, all φ-nodes must appear before all non-φ
+// nodes.
+//
+// Pos() returns the position of the && or || for short-circuit
+// control-flow joins, or that of the *Alloc for φ-nodes inserted
+// during SSA renaming.
+//
+// Example printed form:
+// t2 = phi [0: t0, 1: t1]
+//
+type Phi struct {
+ register
+ Comment string // a hint as to its purpose
+ Edges []Value // Edges[i] is value for Block().Preds[i]
+}
+
+// The Call instruction represents a function or method call.
+//
+// The Call instruction yields the function result if there is exactly
+// one. Otherwise it returns a tuple, the components of which are
+// accessed via Extract.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.CallExpr.Lparen, if explicit in the source.
+//
+// Example printed form:
+// t2 = println(t0, t1)
+// t4 = t3()
+// t7 = invoke t5.Println(...t6)
+//
+type Call struct {
+ register
+ Call CallCommon
+}
+
+// The BinOp instruction yields the result of binary operation X Op Y.
+//
+// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source.
+//
+// Example printed form:
+//	t1 = t0 + 1:int
+//
+type BinOp struct {
+	register
+	// One of:
+	// ADD SUB MUL QUO REM          + - * / %
+	// AND OR XOR SHL SHR AND_NOT   & | ^ << >> &^
+	// EQL LSS GTR NEQ LEQ GEQ      == < > != <= >=
+	Op token.Token
+	X, Y Value
+}
+
+// The UnOp instruction yields the result of Op X.
+// ARROW is channel receive.
+// MUL is pointer indirection (load).
+// XOR is bitwise complement.
+// SUB is negation.
+// NOT is logical negation.
+//
+// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above
+// and a boolean indicating the success of the receive. The
+// components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source.
+// For receive operations (ARROW) implicit in ranging over a channel,
+// Pos() returns the ast.RangeStmt.For.
+// For implicit memory loads (MUL), Pos() returns the position of the
+// most closely associated source-level construct; the details are not
+// specified.
+//
+// Example printed form:
+//	t0 = *x
+//	t2 = <-t1,ok
+//
+type UnOp struct {
+	register
+	Op      token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^
+	X       Value
+	CommaOk bool
+}
+
+// The ChangeType instruction applies to X a value-preserving type
+// change to Type().
+//
+// Type changes are permitted:
+// - between a named type and its underlying type.
+// - between two named types of the same underlying type.
+// - between (possibly named) pointers to identical base types.
+// - from a bidirectional channel to a read- or write-channel,
+// optionally adding/removing a name.
+//
+// This operation cannot fail dynamically.
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+// t1 = changetype *int <- IntPtr (t0)
+//
+type ChangeType struct {
+ register
+ X Value
+}
+
+// The Convert instruction yields the conversion of value X to type
+// Type(). One or both of those types is basic (but possibly named).
+//
+// A conversion may change the value and representation of its operand.
+// Conversions are permitted:
+// - between real numeric types.
+// - between complex numeric types.
+// - between string and []byte or []rune.
+// - between pointers and unsafe.Pointer.
+// - between unsafe.Pointer and uintptr.
+// - from (Unicode) integer to (UTF-8) string.
+// A conversion may imply a type name change also.
+//
+// This operation cannot fail dynamically.
+//
+// Conversions of untyped string/number/bool constants to a specific
+// representation are eliminated during SSA construction.
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+// t1 = convert []byte <- string (t0)
+//
+type Convert struct {
+ register
+ X Value
+}
+
+// ChangeInterface constructs a value of one interface type from a
+// value of another interface type known to be assignable to it.
+// This operation cannot fail.
+//
+// Pos() returns the ast.CallExpr.Lparen if the instruction arose from
+// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the
+// instruction arose from an explicit e.(T) operation; or token.NoPos
+// otherwise.
+//
+// Example printed form:
+// t1 = change interface interface{} <- I (t0)
+//
+type ChangeInterface struct {
+ register
+ X Value
+}
+
+// MakeInterface constructs an instance of an interface type from a
+// value of a concrete type.
+//
+// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set
+// of X, and Program.Method(m) to find the implementation of a method.
+//
+// To construct the zero value of an interface type T, use:
+// NewConst(exact.MakeNil(), T, pos)
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+// t1 = make interface{} <- int (42:int)
+// t2 = make Stringer <- t0
+//
+type MakeInterface struct {
+ register
+ X Value
+}
+
+// The MakeClosure instruction yields a closure value whose code is
+// Fn and whose free variables' values are supplied by Bindings.
+//
+// Type() returns a (possibly named) *types.Signature.
+//
+// Pos() returns the ast.FuncLit.Type.Func for a function literal
+// closure or the ast.SelectorExpr.Sel for a bound method closure.
+//
+// Example printed form:
+// t0 = make closure anon@1.2 [x y z]
+// t1 = make closure bound$(main.I).add [i]
+//
+type MakeClosure struct {
+ register
+ Fn Value // always a *Function
+ Bindings []Value // values for each free variable in Fn.FreeVars
+}
+
+// The MakeMap instruction creates a new hash-table-based map object
+// and yields a value of kind map.
+//
+// Type() returns a (possibly named) *types.Map.
+//
+// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or
+// the ast.CompositeLit.Lbrace if created by a literal.
+//
+// Example printed form:
+//	t1 = make map[string]int t0
+//	t1 = make StringIntMap t0
+//
+type MakeMap struct {
+	register
+	Reserve Value // initial space reservation; nil => default
+}
+
+// The MakeChan instruction creates a new channel object and yields a
+// value of kind chan.
+//
+// Type() returns a (possibly named) *types.Chan.
+//
+// Pos() returns the ast.CallExpr.Lparen for the make(chan) that
+// created it.
+//
+// Example printed form:
+// t0 = make chan int 0
+// t0 = make IntChan 0
+//
+type MakeChan struct {
+ register
+ Size Value // int; size of buffer; zero => synchronous.
+}
+
+// The MakeSlice instruction yields a slice of length Len backed by a
+// newly allocated array of length Cap.
+//
+// Both Len and Cap must be non-nil Values of integer type.
+//
+// (Alloc(types.Array) followed by Slice will not suffice because
+// Alloc can only create arrays of constant length.)
+//
+// Type() returns a (possibly named) *types.Slice.
+//
+// Pos() returns the ast.CallExpr.Lparen for the make([]T) that
+// created it.
+//
+// Example printed form:
+// t1 = make []string 1:int t0
+// t1 = make StringSlice 1:int t0
+//
+type MakeSlice struct {
+ register
+ Len Value
+ Cap Value
+}
+
+// The Slice instruction yields a slice of an existing string, slice
+// or *array X between optional integer bounds Low and High.
+//
+// Dynamically, this instruction panics if X evaluates to a nil *array
+// pointer.
+//
+// Type() returns string if the type of X was string, otherwise a
+// *types.Slice with the same element type as X.
+//
+// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice
+// operation, the ast.CompositeLit.Lbrace if created by a literal, or
+// NoPos if not explicit in the source (e.g. a variadic argument slice).
+//
+// Example printed form:
+// t1 = slice t0[1:]
+//
+type Slice struct {
+ register
+ X Value // slice, string, or *array
+ Low, High, Max Value // each may be nil
+}
+
+// The FieldAddr instruction yields the address of Field of *struct X.
+//
+// The field is identified by its index within the field list of the
+// struct type of X.
+//
+// Dynamically, this instruction panics if X evaluates to a nil
+// pointer.
+//
+// Type() returns a (possibly named) *types.Pointer.
+//
+// Pos() returns the position of the ast.SelectorExpr.Sel for the
+// field, if explicit in the source.
+//
+// Example printed form:
+// t1 = &t0.name [#1]
+//
+type FieldAddr struct {
+ register
+ X Value // *struct
+ Field int // index into X.Type().Deref().(*types.Struct).Fields
+}
+
+// The Field instruction yields the Field of struct X.
+//
+// The field is identified by its index within the field list of the
+// struct type of X; by using numeric indices we avoid ambiguity of
+// package-local identifiers and permit compact representations.
+//
+// Pos() returns the position of the ast.SelectorExpr.Sel for the
+// field, if explicit in the source.
+//
+// Example printed form:
+// t1 = t0.name [#1]
+//
+type Field struct {
+ register
+ X Value // struct
+ Field int // index into X.Type().(*types.Struct).Fields
+}
+
+// The IndexAddr instruction yields the address of the element at
+// index Index of collection X. Index is an integer expression.
+//
+// The elements of maps and strings are not addressable; use Lookup or
+// MapUpdate instead.
+//
+// Dynamically, this instruction panics if X evaluates to a nil *array
+// pointer.
+//
+// Type() returns a (possibly named) *types.Pointer.
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+//	t2 = &t0[t1]
+//
+type IndexAddr struct {
+	register
+	X     Value // slice or *array
+	Index Value // numeric index
+}
+
+// The Index instruction yields element Index of array X.
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+// t2 = t0[t1]
+//
+type Index struct {
+ register
+ X Value // array
+ Index Value // integer index
+}
+
+// The Lookup instruction yields element Index of collection X, a map
+// or string. Index is an integer expression if X is a string or the
+// appropriate key type if X is a map.
+//
+// If CommaOk, the result is a 2-tuple of the value above and a
+// boolean indicating the result of a map membership test for the key.
+// The components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source.
+//
+// Example printed form:
+// t2 = t0[t1]
+// t5 = t3[t4],ok
+//
+type Lookup struct {
+ register
+ X Value // string or map
+ Index Value // numeric or key-typed index
+ CommaOk bool // return a value,ok pair
+}
+
+// SelectState is a helper for Select.
+// It represents one goal state and its corresponding communication.
+//
+type SelectState struct {
+ Dir types.ChanDir // direction of case (SendOnly or RecvOnly)
+ Chan Value // channel to use (for send or receive)
+ Send Value // value to send (for send)
+ Pos token.Pos // position of token.ARROW
+ DebugNode ast.Node // ast.SendStmt or ast.UnaryExpr(<-) [debug mode]
+}
+
+// The Select instruction tests whether (or blocks until) one
+// of the specified sent or received states is entered.
+//
+// Let n be the number of States for which Dir==RECV and T_i (0<=i<n)
+// be the element type of each such state's Chan.
+// Select returns an n+2-tuple
+//	(index int, recvOk bool, r_0 T_0, ... r_n-1 T_n-1)
+// The tuple's components, described below, must be accessed via the
+// Extract instruction.
+//
+// If Blocking, select waits until exactly one state holds, i.e. a
+// channel becomes ready for the designated operation of sending or
+// receiving; select chooses one among the ready states
+// pseudorandomly, performs the send or receive operation, and sets
+// 'index' to the index of the chosen channel.
+//
+// If !Blocking, select doesn't block if no states hold; instead it
+// returns immediately with index equal to -1.
+//
+// If the chosen channel was used for a receive, the r_i component is
+// set to the received value, where i is the index of that state among
+// all n receive states; otherwise r_i has the zero value of type T_i.
+// Note that the receive index i is not the same as the state
+// index index.
+//
+// The second component of the tuple, recvOk, is a boolean whose value
+// is true iff the selected operation was a receive and the receive
+// successfully yielded a value.
+//
+// Pos() returns the ast.SelectStmt.Select.
+//
+// Example printed form:
+//	t3 = select nonblocking [<-t0, t1<-t2]
+//	t4 = select blocking []
+//
+type Select struct {
+	register
+	States   []*SelectState
+	Blocking bool
+}
+
+// The Range instruction yields an iterator over the domain and range
+// of X, which must be a string or map.
+//
+// Elements are accessed via Next.
+//
+// Type() returns an opaque and degenerate "rangeIter" type.
+//
+// Pos() returns the ast.RangeStmt.For.
+//
+// Example printed form:
+// t0 = range "hello":string
+//
+type Range struct {
+ register
+ X Value // string or map
+}
+
+// The Next instruction reads and advances the (map or string)
+// iterator Iter and returns a 3-tuple value (ok, k, v). If the
+// iterator is not exhausted, ok is true and k and v are the next
+// elements of the domain and range, respectively. Otherwise ok is
+// false and k and v are undefined.
+//
+// Components of the tuple are accessed using Extract.
+//
+// The IsString field distinguishes iterators over strings from those
+// over maps, as the Type() alone is insufficient: consider
+// map[int]rune.
+//
+// Type() returns a *types.Tuple for the triple (ok, k, v).
+// The types of k and/or v may be types.Invalid.
+//
+// Example printed form:
+// t1 = next t0
+//
+type Next struct {
+ register
+ Iter Value
+ IsString bool // true => string iterator; false => map iterator.
+}
+
+// The TypeAssert instruction tests whether interface value X has type
+// AssertedType.
+//
+// If !CommaOk, on success it returns v, the result of the conversion
+// (defined below); on failure it panics.
+//
+// If CommaOk: on success it returns a pair (v, true) where v is the
+// result of the conversion; on failure it returns (z, false) where z
+// is AssertedType's zero value. The components of the pair must be
+// accessed using the Extract instruction.
+//
+// If AssertedType is a concrete type, TypeAssert checks whether the
+// dynamic type in interface X is equal to it, and if so, the result
+// of the conversion is a copy of the value in the interface.
+//
+// If AssertedType is an interface, TypeAssert checks whether the
+// dynamic type of the interface is assignable to it, and if so, the
+// result of the conversion is a copy of the interface value X.
+// If AssertedType is a superinterface of X.Type(), the operation will
+// fail iff the operand is nil. (Contrast with ChangeInterface, which
+// performs no nil-check.)
+//
+// Type() reflects the actual type of the result, possibly a
+// 2-types.Tuple; AssertedType is the asserted type.
+//
+// Pos() returns the ast.CallExpr.Lparen if the instruction arose from
+// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the
+// instruction arose from an explicit e.(T) operation; or the
+// ast.CaseClause.Case if the instruction arose from a case of a
+// type-switch statement.
+//
+// Example printed form:
+// t1 = typeassert t0.(int)
+// t3 = typeassert,ok t2.(T)
+//
+type TypeAssert struct {
+ register
+ X Value
+ AssertedType types.Type
+ CommaOk bool
+}
+
+// The Extract instruction yields component Index of Tuple.
+//
+// This is used to access the results of instructions with multiple
+// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and
+// IndexExpr(Map).
+//
+// Example printed form:
+// t1 = extract t0 #1
+//
+type Extract struct {
+ register
+ Tuple Value
+ Index int
+}
+
+// Instructions executed for effect. They do not yield a value. --------------------
+
+// The Jump instruction transfers control to the sole successor of its
+// owning block.
+//
+// A Jump must be the last instruction of its containing BasicBlock.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+// jump done
+//
+type Jump struct {
+ anInstruction
+}
+
+// The If instruction transfers control to one of the two successors
+// of its owning block, depending on the boolean Cond: the first if
+// true, the second if false.
+//
+// An If instruction must be the last instruction of its containing
+// BasicBlock.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+// if t0 goto done else body
+//
+type If struct {
+ anInstruction
+ Cond Value
+}
+
+// The Return instruction returns values and control back to the calling
+// function.
+//
+// len(Results) is always equal to the number of results in the
+// function's signature.
+//
+// If len(Results) > 1, Return returns a tuple value with the specified
+// components which the caller must access using Extract instructions.
+//
+// There is no instruction to return a ready-made tuple like those
+// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or
+// a tail-call to a function with multiple result parameters.
+//
+// Return must be the last instruction of its containing BasicBlock.
+// Such a block has no successors.
+//
+// Pos() returns the ast.ReturnStmt.Return, if explicit in the source.
+//
+// Example printed form:
+// return
+// return nil:I, 2:int
+//
+type Return struct {
+ anInstruction
+ Results []Value
+ pos token.Pos
+}
+
+// The RunDefers instruction pops and invokes the entire stack of
+// procedure calls pushed by Defer instructions in this function.
+//
+// It is legal to encounter multiple 'rundefers' instructions in a
+// single control-flow path through a function; this is useful in
+// the combined init() function, for example.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+// rundefers
+//
+type RunDefers struct {
+ anInstruction
+}
+
+// The Panic instruction initiates a panic with value X.
+//
+// A Panic instruction must be the last instruction of its containing
+// BasicBlock, which must have no successors.
+//
+// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction;
+// they are treated as calls to a built-in function.
+//
+// Pos() returns the ast.CallExpr.Lparen if this panic was explicit
+// in the source.
+//
+// Example printed form:
+// panic t0
+//
+type Panic struct {
+ anInstruction
+ X Value // an interface{}
+ pos token.Pos
+}
+
+// The Go instruction creates a new goroutine and calls the specified
+// function within it.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.GoStmt.Go.
+//
+// Example printed form:
+// go println(t0, t1)
+// go t3()
+// go invoke t5.Println(...t6)
+//
+type Go struct {
+ anInstruction
+ Call CallCommon
+ pos token.Pos
+}
+
+// The Defer instruction pushes the specified call onto a stack of
+// functions to be called by a RunDefers instruction or by a panic.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.DeferStmt.Defer.
+//
+// Example printed form:
+// defer println(t0, t1)
+// defer t3()
+// defer invoke t5.Println(...t6)
+//
+type Defer struct {
+ anInstruction
+ Call CallCommon
+ pos token.Pos
+}
+
+// The Send instruction sends X on channel Chan.
+//
+// Pos() returns the ast.SendStmt.Arrow, if explicit in the source.
+//
+// Example printed form:
+// send t0 <- t1
+//
+type Send struct {
+ anInstruction
+ Chan, X Value
+ pos token.Pos
+}
+
+// The Store instruction stores Val at address Addr.
+// Stores can be of arbitrary types.
+//
+// Pos() returns the position of the source-level construct most closely
+// associated with the memory store operation.
+// Since implicit memory stores are numerous and varied and depend upon
+// implementation choices, the details are not specified.
+//
+// Example printed form:
+// *x = y
+//
+type Store struct {
+ anInstruction
+ Addr Value
+ Val Value
+ pos token.Pos
+}
+
+// The BlankStore instruction is emitted for assignments to the blank
+// identifier.
+//
+// BlankStore is a pseudo-instruction: it has no dynamic effect.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+// _ = t0
+//
+type BlankStore struct {
+ anInstruction
+ Val Value
+}
+
+// The MapUpdate instruction updates the association of Map[Key] to
+// Value.
+//
+// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack,
+// if explicit in the source.
+//
+// Example printed form:
+// t0[t1] = t2
+//
+type MapUpdate struct {
+ anInstruction
+ Map Value
+ Key Value
+ Value Value
+ pos token.Pos
+}
+
+// A DebugRef instruction maps a source-level expression Expr to the
+// SSA value X that represents the value (!IsAddr) or address (IsAddr)
+// of that expression.
+//
+// DebugRef is a pseudo-instruction: it has no dynamic effect.
+//
+// Pos() returns Expr.Pos(), the start position of the source-level
+// expression. This is not the same as the "designated" token as
+// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the
+// position of the ("designated") Lparen token.
+//
+// If Expr is an *ast.Ident denoting a var or func, Object() returns
+// the object; though this information can be obtained from the type
+// checker, including it here greatly facilitates debugging.
+// For non-Ident expressions, Object() returns nil.
+//
+// DebugRefs are generated only for functions built with debugging
+// enabled; see Package.SetDebugMode() and the GlobalDebug builder
+// mode flag.
+//
+// DebugRefs are not emitted for ast.Idents referring to constants or
+// predeclared identifiers, since they are trivial and numerous.
+// Nor are they emitted for ast.ParenExprs.
+//
+// (By representing these as instructions, rather than out-of-band,
+// consistency is maintained during transformation passes by the
+// ordinary SSA renaming machinery.)
+//
+// Example printed form:
+// ; *ast.CallExpr @ 102:9 is t5
+// ; var x float64 @ 109:72 is x
+// ; address of *ast.CompositeLit @ 216:10 is t0
+//
+type DebugRef struct {
+ anInstruction
+ Expr ast.Expr // the referring expression (never *ast.ParenExpr)
+ object types.Object // the identity of the source var/func
+ IsAddr bool // Expr is addressable and X is the address it denotes
+ X Value // the value or address of Expr
+}
+
+// Embeddable mix-ins and helpers for common parts of other structs. -----------
+
+// register is a mix-in embedded by all SSA values that are also
+// instructions, i.e. virtual registers, and provides a uniform
+// implementation of most of the Value interface: Value.Name() is a
+// numbered register (e.g. "t0"); the other methods are field accessors.
+//
+// Temporary names are automatically assigned to each register on
+// completion of building a function in SSA form.
+//
+// Clients must not assume that the 'id' value (and the Name() derived
+// from it) is unique within a function. As always in this API,
+// semantics are determined only by identity; names exist only to
+// facilitate debugging.
+//
+type register struct {
+ anInstruction
+ num int // "name" of virtual register, e.g. "t0". Not guaranteed unique.
+ typ types.Type // type of virtual register
+ pos token.Pos // position of source expression, or NoPos
+ referrers []Instruction
+}
+
+// anInstruction is a mix-in embedded by all Instructions.
+// It provides the implementations of the Block and setBlock methods.
+type anInstruction struct {
+ block *BasicBlock // the basic block of this instruction
+}
+
+// CallCommon is contained by Go, Defer and Call to hold the
+// common parts of a function or method call.
+//
+// Each CallCommon exists in one of two modes, function call and
+// interface method invocation, or "call" and "invoke" for short.
+//
+// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon
+// represents an ordinary function call of the value in Value,
+// which may be a *Builtin, a *Function or any other value of kind
+// 'func'.
+//
+// Value may be one of:
+// (a) a *Function, indicating a statically dispatched call
+// to a package-level function, an anonymous function, or
+// a method of a named type.
+// (b) a *MakeClosure, indicating an immediately applied
+// function literal with free variables.
+// (c) a *Builtin, indicating a statically dispatched call
+// to a built-in function.
+// (d) any other value, indicating a dynamically dispatched
+// function call.
+// StaticCallee returns the identity of the callee in cases
+// (a) and (b), nil otherwise.
+//
+// Args contains the arguments to the call. If Value is a method,
+// Args[0] contains the receiver parameter.
+//
+// Example printed form:
+// t2 = println(t0, t1)
+// go t3()
+// defer t5(...t6)
+//
+// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon
+// represents a dynamically dispatched call to an interface method.
+// In this mode, Value is the interface value and Method is the
+// interface's abstract method. Note: an abstract method may be
+// shared by multiple interfaces due to embedding; Value.Type()
+// provides the specific interface used for this call.
+//
+// Value is implicitly supplied to the concrete method implementation
+// as the receiver parameter; in other words, Args[0] holds not the
+// receiver but the first true argument.
+//
+// Example printed form:
+// t1 = invoke t0.String()
+// go invoke t3.Run(t2)
+// defer invoke t4.Handle(...t5)
+//
+// For all calls to variadic functions (Signature().Variadic()),
+// the last element of Args is a slice.
+//
+type CallCommon struct {
+ Value Value // receiver (invoke mode) or func value (call mode)
+ Method *types.Func // abstract method (invoke mode)
+ Args []Value // actual parameters (in static method call, includes receiver)
+ pos token.Pos // position of CallExpr.Lparen, iff explicit in source
+}
+
+// IsInvoke returns true if this call has "invoke" (not "call") mode.
+func (c *CallCommon) IsInvoke() bool {
+ return c.Method != nil
+}
+
+func (c *CallCommon) Pos() token.Pos { return c.pos }
+
+// Signature returns the signature of the called function.
+//
+// For an "invoke"-mode call, the signature of the interface method is
+// returned.
+//
+// In either "call" or "invoke" mode, if the callee is a method, its
+// receiver is represented by sig.Recv, not sig.Params().At(0).
+//
+func (c *CallCommon) Signature() *types.Signature {
+ if c.Method != nil {
+ return c.Method.Type().(*types.Signature)
+ }
+ return c.Value.Type().Underlying().(*types.Signature)
+}
+
+// StaticCallee returns the callee if this is a trivially static
+// "call"-mode call to a function.
+func (c *CallCommon) StaticCallee() *Function {
+ switch fn := c.Value.(type) {
+ case *Function:
+ return fn
+ case *MakeClosure:
+ return fn.Fn.(*Function)
+ }
+ return nil
+}
+
+// Description returns a description of the mode of this call suitable
+// for a user interface, e.g., "static method call".
+func (c *CallCommon) Description() string {
+ switch fn := c.Value.(type) {
+ case *Builtin:
+ return "built-in function call"
+ case *MakeClosure:
+ return "static function closure call"
+ case *Function:
+ if fn.Signature.Recv() != nil {
+ return "static method call"
+ }
+ return "static function call"
+ }
+ if c.IsInvoke() {
+ return "dynamic method call" // ("invoke" mode)
+ }
+ return "dynamic function call"
+}
+
+// The CallInstruction interface, implemented by *Go, *Defer and *Call,
+// exposes the common parts of function-calling instructions,
+// yet provides a way back to the Value defined by *Call alone.
+//
+type CallInstruction interface {
+ Instruction
+ Common() *CallCommon // returns the common parts of the call
+ Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer)
+}
+
+func (s *Call) Common() *CallCommon { return &s.Call }
+func (s *Defer) Common() *CallCommon { return &s.Call }
+func (s *Go) Common() *CallCommon { return &s.Call }
+
+func (s *Call) Value() *Call { return s }
+func (s *Defer) Value() *Call { return nil }
+func (s *Go) Value() *Call { return nil }
+
+func (v *Builtin) Type() types.Type { return v.sig }
+func (v *Builtin) Name() string { return v.name }
+func (*Builtin) Referrers() *[]Instruction { return nil }
+func (v *Builtin) Pos() token.Pos { return token.NoPos }
+func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) }
+func (v *Builtin) Parent() *Function { return nil }
+
+func (v *FreeVar) Type() types.Type { return v.typ }
+func (v *FreeVar) Name() string { return v.name }
+func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers }
+func (v *FreeVar) Pos() token.Pos { return v.pos }
+func (v *FreeVar) Parent() *Function { return v.parent }
+
+func (v *Global) Type() types.Type { return v.typ }
+func (v *Global) Name() string { return v.name }
+func (v *Global) Parent() *Function { return nil }
+func (v *Global) Pos() token.Pos { return v.pos }
+func (v *Global) Referrers() *[]Instruction { return nil }
+func (v *Global) Token() token.Token { return token.VAR }
+func (v *Global) Object() types.Object { return v.object }
+func (v *Global) String() string { return v.RelString(nil) }
+func (v *Global) Package() *Package { return v.Pkg }
+func (v *Global) RelString(from *types.Package) string { return relString(v, from) }
+
+func (v *Function) Name() string { return v.name }
+func (v *Function) Type() types.Type { return v.Signature }
+func (v *Function) Pos() token.Pos { return v.pos }
+func (v *Function) Token() token.Token { return token.FUNC }
+func (v *Function) Object() types.Object { return v.object }
+func (v *Function) String() string { return v.RelString(nil) }
+func (v *Function) Package() *Package { return v.Pkg }
+func (v *Function) Parent() *Function { return v.parent }
+func (v *Function) Referrers() *[]Instruction {
+ if v.parent != nil {
+ return &v.referrers
+ }
+ return nil
+}
+
+func (v *Parameter) Type() types.Type { return v.typ }
+func (v *Parameter) Name() string { return v.name }
+func (v *Parameter) Object() types.Object { return v.object }
+func (v *Parameter) Referrers() *[]Instruction { return &v.referrers }
+func (v *Parameter) Pos() token.Pos { return v.pos }
+func (v *Parameter) Parent() *Function { return v.parent }
+
+func (v *Alloc) Type() types.Type { return v.typ }
+func (v *Alloc) Referrers() *[]Instruction { return &v.referrers }
+func (v *Alloc) Pos() token.Pos { return v.pos }
+
+func (v *register) Type() types.Type { return v.typ }
+func (v *register) setType(typ types.Type) { v.typ = typ }
+func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) }
+func (v *register) setNum(num int) { v.num = num }
+func (v *register) Referrers() *[]Instruction { return &v.referrers }
+func (v *register) Pos() token.Pos { return v.pos }
+func (v *register) setPos(pos token.Pos) { v.pos = pos }
+
+func (v *anInstruction) Parent() *Function { return v.block.parent }
+func (v *anInstruction) Block() *BasicBlock { return v.block }
+func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block }
+func (v *anInstruction) Referrers() *[]Instruction { return nil }
+
+func (t *Type) Name() string { return t.object.Name() }
+func (t *Type) Pos() token.Pos { return t.object.Pos() }
+func (t *Type) Type() types.Type { return t.object.Type() }
+func (t *Type) Token() token.Token { return token.TYPE }
+func (t *Type) Object() types.Object { return t.object }
+func (t *Type) String() string { return t.RelString(nil) }
+func (t *Type) Package() *Package { return t.pkg }
+func (t *Type) RelString(from *types.Package) string { return relString(t, from) }
+
+func (c *NamedConst) Name() string { return c.object.Name() }
+func (c *NamedConst) Pos() token.Pos { return c.object.Pos() }
+func (c *NamedConst) String() string { return c.RelString(nil) }
+func (c *NamedConst) Type() types.Type { return c.object.Type() }
+func (c *NamedConst) Token() token.Token { return token.CONST }
+func (c *NamedConst) Object() types.Object { return c.object }
+func (c *NamedConst) Package() *Package { return c.pkg }
+func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) }
+
+// Func returns the package-level function of the specified name,
+// or nil if not found.
+//
+func (p *Package) Func(name string) (f *Function) {
+ f, _ = p.Members[name].(*Function)
+ return
+}
+
+// Var returns the package-level variable of the specified name,
+// or nil if not found.
+//
+func (p *Package) Var(name string) (g *Global) {
+ g, _ = p.Members[name].(*Global)
+ return
+}
+
+// Const returns the package-level constant of the specified name,
+// or nil if not found.
+//
+func (p *Package) Const(name string) (c *NamedConst) {
+ c, _ = p.Members[name].(*NamedConst)
+ return
+}
+
+// Type returns the package-level type of the specified name,
+// or nil if not found.
+//
+func (p *Package) Type(name string) (t *Type) {
+ t, _ = p.Members[name].(*Type)
+ return
+}
+
+func (v *Call) Pos() token.Pos { return v.Call.pos }
+func (s *Defer) Pos() token.Pos { return s.pos }
+func (s *Go) Pos() token.Pos { return s.pos }
+func (s *MapUpdate) Pos() token.Pos { return s.pos }
+func (s *Panic) Pos() token.Pos { return s.pos }
+func (s *Return) Pos() token.Pos { return s.pos }
+func (s *Send) Pos() token.Pos { return s.pos }
+func (s *Store) Pos() token.Pos { return s.pos }
+func (s *BlankStore) Pos() token.Pos { return token.NoPos }
+func (s *If) Pos() token.Pos { return token.NoPos }
+func (s *Jump) Pos() token.Pos { return token.NoPos }
+func (s *RunDefers) Pos() token.Pos { return token.NoPos }
+func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() }
+
+// Operands.
+
+func (v *Alloc) Operands(rands []*Value) []*Value {
+ return rands
+}
+
+func (v *BinOp) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Y)
+}
+
+func (c *CallCommon) Operands(rands []*Value) []*Value {
+ rands = append(rands, &c.Value)
+ for i := range c.Args {
+ rands = append(rands, &c.Args[i])
+ }
+ return rands
+}
+
+func (s *Go) Operands(rands []*Value) []*Value {
+ return s.Call.Operands(rands)
+}
+
+func (s *Call) Operands(rands []*Value) []*Value {
+ return s.Call.Operands(rands)
+}
+
+func (s *Defer) Operands(rands []*Value) []*Value {
+ return s.Call.Operands(rands)
+}
+
+func (v *ChangeInterface) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *ChangeType) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *Convert) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (s *DebugRef) Operands(rands []*Value) []*Value {
+ return append(rands, &s.X)
+}
+
+func (v *Extract) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Tuple)
+}
+
+func (v *Field) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *FieldAddr) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (s *If) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Cond)
+}
+
+func (v *Index) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Index)
+}
+
+func (v *IndexAddr) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Index)
+}
+
+func (*Jump) Operands(rands []*Value) []*Value {
+ return rands
+}
+
+func (v *Lookup) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Index)
+}
+
+func (v *MakeChan) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Size)
+}
+
+func (v *MakeClosure) Operands(rands []*Value) []*Value {
+ rands = append(rands, &v.Fn)
+ for i := range v.Bindings {
+ rands = append(rands, &v.Bindings[i])
+ }
+ return rands
+}
+
+func (v *MakeInterface) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *MakeMap) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Reserve)
+}
+
+func (v *MakeSlice) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Len, &v.Cap)
+}
+
+func (v *MapUpdate) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Map, &v.Key, &v.Value)
+}
+
+func (v *Next) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Iter)
+}
+
+func (s *Panic) Operands(rands []*Value) []*Value {
+ return append(rands, &s.X)
+}
+
+func (v *Sigma) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *Phi) Operands(rands []*Value) []*Value {
+ for i := range v.Edges {
+ rands = append(rands, &v.Edges[i])
+ }
+ return rands
+}
+
+func (v *Range) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (s *Return) Operands(rands []*Value) []*Value {
+ for i := range s.Results {
+ rands = append(rands, &s.Results[i])
+ }
+ return rands
+}
+
+func (*RunDefers) Operands(rands []*Value) []*Value {
+ return rands
+}
+
+func (v *Select) Operands(rands []*Value) []*Value {
+ for i := range v.States {
+ rands = append(rands, &v.States[i].Chan, &v.States[i].Send)
+ }
+ return rands
+}
+
+func (s *Send) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Chan, &s.X)
+}
+
+func (v *Slice) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Low, &v.High, &v.Max)
+}
+
+func (s *Store) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Addr, &s.Val)
+}
+
+func (s *BlankStore) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Val)
+}
+
+func (v *TypeAssert) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *UnOp) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+// Non-Instruction Values:
+func (v *Builtin) Operands(rands []*Value) []*Value { return rands }
+func (v *FreeVar) Operands(rands []*Value) []*Value { return rands }
+func (v *Const) Operands(rands []*Value) []*Value { return rands }
+func (v *Function) Operands(rands []*Value) []*Value { return rands }
+func (v *Global) Operands(rands []*Value) []*Value { return rands }
+func (v *Parameter) Operands(rands []*Value) []*Value { return rands }
diff --git a/vendor/honnef.co/go/tools/ssa/ssautil/load.go b/vendor/honnef.co/go/tools/ssa/ssautil/load.go
new file mode 100644
index 0000000..2fc108f
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/ssautil/load.go
@@ -0,0 +1,97 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssautil
+
+// This file defines utility functions for constructing programs in SSA form.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/loader"
+ "honnef.co/go/tools/ssa"
+)
+
+// CreateProgram returns a new program in SSA form, given a program
+// loaded from source. An SSA package is created for each transitively
+// error-free package of lprog.
+//
+// Code for bodies of functions is not built until Build is called
+// on the result.
+//
+// mode controls diagnostics and checking during SSA construction.
+//
+func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program {
+ prog := ssa.NewProgram(lprog.Fset, mode)
+
+ for _, info := range lprog.AllPackages {
+ if info.TransitivelyErrorFree {
+ prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
+ }
+ }
+
+ return prog
+}
+
+// BuildPackage builds an SSA program with IR for a single package.
+//
+// It populates pkg by type-checking the specified file ASTs. All
+// dependencies are loaded using the importer specified by tc, which
+// typically loads compiler export data; SSA code cannot be built for
+// those packages. BuildPackage then constructs an ssa.Program with all
+// dependency packages created, and builds and returns the SSA package
+// corresponding to pkg.
+//
+// The caller must have set pkg.Path() to the import path.
+//
+// The operation fails if there were any type-checking or import errors.
+//
+// See ../ssa/example_test.go for an example.
+//
+func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) {
+ if fset == nil {
+ panic("no token.FileSet")
+ }
+ if pkg.Path() == "" {
+ panic("package has no import path")
+ }
+
+ info := &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+ if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
+ return nil, nil, err
+ }
+
+ prog := ssa.NewProgram(fset, mode)
+
+ // Create SSA packages for all imports.
+ // Order is not significant.
+ created := make(map[*types.Package]bool)
+ var createAll func(pkgs []*types.Package)
+ createAll = func(pkgs []*types.Package) {
+ for _, p := range pkgs {
+ if !created[p] {
+ created[p] = true
+ prog.CreatePackage(p, nil, nil, true)
+ createAll(p.Imports())
+ }
+ }
+ }
+ createAll(pkg.Imports())
+
+ // Create and build the primary package.
+ ssapkg := prog.CreatePackage(pkg, files, info, false)
+ ssapkg.Build()
+ return ssapkg, info, nil
+}
diff --git a/vendor/honnef.co/go/tools/ssa/ssautil/switch.go b/vendor/honnef.co/go/tools/ssa/ssautil/switch.go
new file mode 100644
index 0000000..ef698b3
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/ssautil/switch.go
@@ -0,0 +1,236 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssautil
+
+// This file implements discovery of switch and type-switch constructs
+// from low-level control flow.
+//
+// Many techniques exist for compiling a high-level switch with
+// constant cases to efficient machine code. The optimal choice will
+// depend on the data type, the specific case values, the code in the
+// body of each case, and the hardware.
+// Some examples:
+// - a lookup table (for a switch that maps constants to constants)
+// - a computed goto
+// - a binary tree
+// - a perfect hash
+// - a two-level switch (to partition constant strings by their first byte).
+
+import (
+ "bytes"
+ "fmt"
+ "go/token"
+ "go/types"
+
+ "honnef.co/go/tools/ssa"
+)
+
// A ConstCase represents a single constant comparison.
// It is part of a Switch.
type ConstCase struct {
	Block *ssa.BasicBlock // block performing the comparison
	Body  *ssa.BasicBlock // body of the case
	Value *ssa.Const      // case comparand (the constant compared against Switch.X)
}

// A TypeCase represents a single type assertion.
// It is part of a Switch.
type TypeCase struct {
	Block   *ssa.BasicBlock // block performing the type assert
	Body    *ssa.BasicBlock // body of the case
	Type    types.Type      // case type
	Binding ssa.Value       // value bound by this case (the 'y' in y, ok := x.(T))
}

// A Switch is a logical high-level control flow operation
// (a multiway branch) discovered by analysis of a CFG containing
// only if/else chains. It is not part of the ssa.Instruction set.
//
// One of ConstCases and TypeCases has length >= 2;
// the other is nil.
//
// In a value switch, the list of cases may contain duplicate constants.
// A type switch may contain duplicate types, or types assignable
// to an interface type also in the list.
// TODO(adonovan): eliminate such duplicates.
//
type Switch struct {
	Start      *ssa.BasicBlock // block containing start of if/else chain
	X          ssa.Value       // the switch operand
	ConstCases []ConstCase     // ordered list of constant comparisons
	TypeCases  []TypeCase      // ordered list of type assertions
	Default    *ssa.BasicBlock // successor if all comparisons fail
}
+
+func (sw *Switch) String() string {
+ // We represent each block by the String() of its
+ // first Instruction, e.g. "print(42:int)".
+ var buf bytes.Buffer
+ if sw.ConstCases != nil {
+ fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name())
+ for _, c := range sw.ConstCases {
+ fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0])
+ }
+ } else {
+ fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name())
+ for _, c := range sw.TypeCases {
+ fmt.Fprintf(&buf, "case %s %s: %s\n",
+ c.Binding.Name(), c.Type, c.Body.Instrs[0])
+ }
+ }
+ if sw.Default != nil {
+ fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0])
+ }
+ fmt.Fprintf(&buf, "}")
+ return buf.String()
+}
+
+// Switches examines the control-flow graph of fn and returns the
+// set of inferred value and type switches. A value switch tests an
+// ssa.Value for equality against two or more compile-time constant
+// values. Switches involving link-time constants (addresses) are
+// ignored. A type switch type-asserts an ssa.Value against two or
+// more types.
+//
+// The switches are returned in dominance order.
+//
+// The resulting switches do not necessarily correspond to uses of the
+// 'switch' keyword in the source: for example, a single source-level
+// switch statement with non-constant cases may result in zero, one or
+// many Switches, one per plural sequence of constant cases.
+// Switches may even be inferred from if/else- or goto-based control flow.
+// (In general, the control flow constructs of the source program
+// cannot be faithfully reproduced from the SSA representation.)
+//
func Switches(fn *ssa.Function) []Switch {
	// Traverse the CFG in dominance order, so we don't
	// enter an if/else-chain in the middle.
	var switches []Switch
	seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet
	for _, b := range fn.DomPreorder() {
		if x, k := isComparisonBlock(b); x != nil {
			// Block b starts a switch.
			sw := Switch{Start: b, X: x}
			valueSwitch(&sw, k, seen)
			// A single comparison is just an 'if'; only chains of
			// two or more cases are reported as a switch.
			if len(sw.ConstCases) > 1 {
				switches = append(switches, sw)
			}
		}

		if y, x, T := isTypeAssertBlock(b); y != nil {
			// Block b starts a type switch.
			sw := Switch{Start: b, X: x}
			typeSwitch(&sw, y, T, seen)
			if len(sw.TypeCases) > 1 {
				switches = append(switches, sw)
			}
		}
	}
	return switches
}
+
// valueSwitch extends sw with consecutive constant-comparison cases,
// starting at sw.Start and following the "false" edge (Succs[1]) of
// each comparison block, until the chain is broken. The first block
// that is not part of the chain becomes sw.Default.
func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) {
	b := sw.Start
	x := sw.X
	// Loop invariant: b ends with 'if x == k'. The loop exits when
	// isComparisonBlock reports a different (or no) operand x.
	for x == sw.X {
		if seen[b] {
			break
		}
		seen[b] = true

		sw.ConstCases = append(sw.ConstCases, ConstCase{
			Block: b,
			Body:  b.Succs[0],
			Value: k,
		})
		b = b.Succs[1]
		if len(b.Instrs) > 2 {
			// Block b contains not just 'if x == k',
			// so it may have side effects that
			// make it unsafe to elide.
			break
		}
		if len(b.Preds) != 1 {
			// Block b has multiple predecessors,
			// so it cannot be treated as a case.
			break
		}
		x, k = isComparisonBlock(b)
	}
	sw.Default = b
}
+
// typeSwitch extends sw with consecutive type-assertion cases,
// starting at sw.Start and following the "false" edge (Succs[1]) of
// each assertion block, until the chain is broken. The first block
// that is not part of the chain becomes sw.Default.
func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) {
	b := sw.Start
	x := sw.X
	// Loop invariant: b ends with 'if y, ok := x.(T); ok'. The loop
	// exits when isTypeAssertBlock reports a different (or no) operand x.
	for x == sw.X {
		if seen[b] {
			break
		}
		seen[b] = true

		sw.TypeCases = append(sw.TypeCases, TypeCase{
			Block:   b,
			Body:    b.Succs[0],
			Type:    T,
			Binding: y,
		})
		b = b.Succs[1]
		if len(b.Instrs) > 4 {
			// Block b contains not just
			// {TypeAssert; Extract #0; Extract #1; If}
			// so it may have side effects that
			// make it unsafe to elide.
			break
		}
		if len(b.Preds) != 1 {
			// Block b has multiple predecessors,
			// so it cannot be treated as a case.
			break
		}
		y, x, T = isTypeAssertBlock(b)
	}
	sw.Default = b
}
+
// isComparisonBlock returns the operands (v, k) if a block ends with
// a comparison v==k, where k is a compile-time constant.
//
func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) {
	if n := len(b.Instrs); n >= 2 {
		if i, ok := b.Instrs[n-1].(*ssa.If); ok {
			// The == must itself be computed in block b; an EQL defined
			// in another block cannot be elided with b.
			if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL {
				// The constant may appear on either side of the ==.
				if k, ok := binop.Y.(*ssa.Const); ok {
					return binop.X, k
				}
				if k, ok := binop.X.(*ssa.Const); ok {
					return binop.Y, k
				}
			}
		}
	}
	return
}
+
// isTypeAssertBlock returns the operands (y, x, T) if a block ends with
// a type assertion "if y, ok := x.(T); ok {".
//
func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) {
	if n := len(b.Instrs); n >= 4 {
		if i, ok := b.Instrs[n-1].(*ssa.If); ok {
			// The branch condition must be the 'ok' component
			// (Extract #1) of a TypeAssert computed in this block.
			if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 {
				if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b {
					// hack: relies upon instruction ordering.
					// Instrs[n-3] is expected to be the Extract #0
					// yielding the asserted value y.
					if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok {
						return ext0, ta.X, ta.AssertedType
					}
				}
			}
		}
	}
	return
}
diff --git a/vendor/honnef.co/go/tools/ssa/ssautil/visit.go b/vendor/honnef.co/go/tools/ssa/ssautil/visit.go
new file mode 100644
index 0000000..5c14845
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/ssautil/visit.go
@@ -0,0 +1,79 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssautil // import "honnef.co/go/tools/ssa/ssautil"
+
+import "honnef.co/go/tools/ssa"
+
+// This file defines utilities for visiting the SSA representation of
+// a Program.
+//
+// TODO(adonovan): test coverage.
+
+// AllFunctions finds and returns the set of functions potentially
+// needed by program prog, as determined by a simple linker-style
+// reachability algorithm starting from the members and method-sets of
+// each package. The result may include anonymous functions and
+// synthetic wrappers.
+//
+// Precondition: all packages are built.
+//
+func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool {
+ visit := visitor{
+ prog: prog,
+ seen: make(map[*ssa.Function]bool),
+ }
+ visit.program()
+ return visit.seen
+}
+
// visitor holds the traversal state for AllFunctions: the program
// being scanned and the set of functions reached so far.
type visitor struct {
	prog *ssa.Program
	seen map[*ssa.Function]bool
}
+
+func (visit *visitor) program() {
+ for _, pkg := range visit.prog.AllPackages() {
+ for _, mem := range pkg.Members {
+ if fn, ok := mem.(*ssa.Function); ok {
+ visit.function(fn)
+ }
+ }
+ }
+ for _, T := range visit.prog.RuntimeTypes() {
+ mset := visit.prog.MethodSets.MethodSet(T)
+ for i, n := 0, mset.Len(); i < n; i++ {
+ visit.function(visit.prog.MethodValue(mset.At(i)))
+ }
+ }
+}
+
// function marks fn as seen and, on first visit, scans the operands of
// every instruction in fn, recursing into any *ssa.Function operands
// it finds (references to other functions reachable from fn's body).
func (visit *visitor) function(fn *ssa.Function) {
	if !visit.seen[fn] {
		visit.seen[fn] = true
		var buf [10]*ssa.Value // avoid alloc in common case
		for _, b := range fn.Blocks {
			for _, instr := range b.Instrs {
				for _, op := range instr.Operands(buf[:0]) {
					if fn, ok := (*op).(*ssa.Function); ok {
						visit.function(fn)
					}
				}
			}
		}
	}
}
+
+// MainPackages returns the subset of the specified packages
+// named "main" that define a main function.
+// The result may include synthetic "testmain" packages.
+func MainPackages(pkgs []*ssa.Package) []*ssa.Package {
+ var mains []*ssa.Package
+ for _, pkg := range pkgs {
+ if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil {
+ mains = append(mains, pkg)
+ }
+ }
+ return mains
+}
diff --git a/vendor/honnef.co/go/tools/ssa/testmain.go b/vendor/honnef.co/go/tools/ssa/testmain.go
new file mode 100644
index 0000000..2b89724
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/testmain.go
@@ -0,0 +1,266 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// CreateTestMainPackage synthesizes a main package that runs all the
+// tests of the supplied packages.
+// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing.
+//
+// TODO(adonovan): this file no longer needs to live in the ssa package.
+// Move it to ssautil.
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/types"
+ "log"
+ "os"
+ "strings"
+ "text/template"
+)
+
+// FindTests returns the Test, Benchmark, and Example functions
+// (as defined by "go test") defined in the specified package,
+// and its TestMain function, if any.
func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) {
	prog := pkg.Prog

	// The first two of these may be nil: if the program doesn't import "testing",
	// it can't contain any tests, but it may yet contain Examples.
	var testSig *types.Signature                              // func(*testing.T)
	var benchmarkSig *types.Signature                         // func(*testing.B)
	var exampleSig = types.NewSignature(nil, nil, nil, false) // func()

	// Obtain the types from the parameters of testing.MainStart.
	if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
		mainStart := testingPkg.Func("MainStart")
		params := mainStart.Signature.Params()
		testSig = funcField(params.At(1).Type())
		benchmarkSig = funcField(params.At(2).Type())

		// Does the package define this function?
		//   func TestMain(*testing.M)
		if f := pkg.Func("TestMain"); f != nil {
			sig := f.Type().(*types.Signature)
			starM := mainStart.Signature.Results().At(0).Type() // *testing.M
			if sig.Results().Len() == 0 &&
				sig.Params().Len() == 1 &&
				types.Identical(sig.Params().At(0).Type(), starM) {
				main = f
			}
		}
	}

	// Only exported functions declared in *_test.go files are
	// candidates for Test/Benchmark/Example status.
	// TODO(adonovan): use a stable order, e.g. lexical.
	for _, mem := range pkg.Members {
		if f, ok := mem.(*Function); ok &&
			ast.IsExported(f.Name()) &&
			strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") {

			switch {
			case testSig != nil && isTestSig(f, "Test", testSig):
				tests = append(tests, f)
			case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig):
				benchmarks = append(benchmarks, f)
			case isTestSig(f, "Example", exampleSig):
				examples = append(examples, f)
			default:
				continue
			}
		}
	}
	return
}
+
// isTestSig reports whether f both looks like a test/benchmark/example
// by name (per isTest) and has a signature identical to sig.
func isTestSig(f *Function, prefix string, sig *types.Signature) bool {
	return isTest(f.Name(), prefix) && types.Identical(f.Signature, sig)
}
+
+// Given the type of one of the three slice parameters of testing.Main,
+// returns the function type.
+func funcField(slice types.Type) *types.Signature {
+ return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature)
+}
+
// isTest reports whether name looks like a test (or benchmark,
// according to prefix): it is the bare prefix, or the prefix followed
// by something that is not a lower-case letter — we don't want
// TesticularCancer.
// Plundered from $GOROOT/src/cmd/go/test.go
func isTest(name, prefix string) bool {
	switch {
	case !strings.HasPrefix(name, prefix):
		return false
	case len(name) == len(prefix):
		// "Test" by itself is ok.
		return true
	default:
		return ast.IsExported(name[len(prefix):])
	}
}
+
+// CreateTestMainPackage creates and returns a synthetic "testmain"
+// package for the specified package if it defines tests, benchmarks or
+// executable examples, or nil otherwise. The new package is named
+// "main" and provides a function named "main" that runs the tests,
+// similar to the one that would be created by the 'go test' tool.
+//
+// Subsequent calls to prog.AllPackages include the new package.
+// The package pkg must belong to the program prog.
func (prog *Program) CreateTestMainPackage(pkg *Package) *Package {
	if pkg.Prog != prog {
		log.Fatal("Package does not belong to Program")
	}

	// Template data
	var data struct {
		Pkg                         *Package
		Tests, Benchmarks, Examples []*Function
		Main                        *Function
		Go18                        bool // testing.MainStart takes an interface (set below)
	}
	data.Pkg = pkg

	// Enumerate tests.
	data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg)
	if data.Main == nil &&
		data.Tests == nil && data.Benchmarks == nil && data.Examples == nil {
		// Nothing to run: no package is synthesized.
		return nil
	}

	// Synthesize source for testmain package.
	// The "$testmain" suffix keeps the synthetic import path distinct
	// from the package under test.
	path := pkg.Pkg.Path() + "$testmain"
	tmpl := testmainTmpl
	if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
		// In Go 1.8, testing.MainStart's first argument is an interface, not a func.
		data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type())
	} else {
		// The program does not import "testing", but FindTests
		// returned non-nil, which must mean there were Examples
		// but no Test, Benchmark, or TestMain functions.

		// We'll simply call them from testmain.main; this will
		// ensure they don't panic, but will not check any
		// "Output:" comments.
		// (We should not execute an Example that has no
		// "Output:" comment, but it's impossible to tell here.)
		tmpl = examplesOnlyTmpl
	}
	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, data); err != nil {
		log.Fatalf("internal error expanding template for %s: %v", path, err)
	}
	if false { // debugging
		fmt.Fprintln(os.Stderr, buf.String())
	}

	// Parse and type-check the testmain package.
	f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0))
	if err != nil {
		log.Fatalf("internal error parsing %s: %v", path, err)
	}
	conf := types.Config{
		DisableUnusedImportCheck: true,
		// Resolve imports against the already-loaded SSA program.
		Importer: importer{pkg},
	}
	files := []*ast.File{f}
	info := &types.Info{
		Types:      make(map[ast.Expr]types.TypeAndValue),
		Defs:       make(map[*ast.Ident]types.Object),
		Uses:       make(map[*ast.Ident]types.Object),
		Implicits:  make(map[ast.Node]types.Object),
		Scopes:     make(map[ast.Node]*types.Scope),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
	}
	testmainPkg, err := conf.Check(path, prog.Fset, files, info)
	if err != nil {
		log.Fatalf("internal error type-checking %s: %v", path, err)
	}

	// Create and build SSA code.
	testmain := prog.CreatePackage(testmainPkg, files, info, false)
	testmain.SetDebugMode(false)
	testmain.Build()
	testmain.Func("main").Synthetic = "test main function"
	testmain.Func("init").Synthetic = "package initializer"
	return testmain
}
+
// importer is an implementation of types.Importer that resolves
// import paths against an already loaded SSA program.
type importer struct {
	pkg *Package // package under test; may be non-importable
}
+
+func (imp importer) Import(path string) (*types.Package, error) {
+ if p := imp.pkg.Prog.ImportedPackage(path); p != nil {
+ return p.Pkg, nil
+ }
+ if path == imp.pkg.Pkg.Path() {
+ return imp.pkg.Pkg, nil
+ }
+ return nil, fmt.Errorf("not found") // can't happen
+}
+
// testmainTmpl is the source template for the synthetic test main
// package, used when the program imports "testing". The .Go18 field
// selects between the Go 1.8+ testing.MainStart deps interface and the
// older matcher-function form; .Main, if set, is the package's own
// TestMain.
var testmainTmpl = template.Must(template.New("testmain").Parse(`
package main

import "io"
import "os"
import "testing"
import p {{printf "%q" .Pkg.Pkg.Path}}

{{if .Go18}}
type deps struct{}

func (deps) MatchString(pat, str string) (bool, error) { return true, nil }
func (deps) StartCPUProfile(io.Writer) error { return nil }
func (deps) StopCPUProfile() {}
func (deps) WriteHeapProfile(io.Writer) error { return nil }
func (deps) WriteProfileTo(string, io.Writer, int) error { return nil }

var match deps
{{else}}
func match(_, _ string) (bool, error) { return true, nil }
{{end}}

func main() {
	tests := []testing.InternalTest{
{{range .Tests}}
		{ {{printf "%q" .Name}}, p.{{.Name}} },
{{end}}
	}
	benchmarks := []testing.InternalBenchmark{
{{range .Benchmarks}}
		{ {{printf "%q" .Name}}, p.{{.Name}} },
{{end}}
	}
	examples := []testing.InternalExample{
{{range .Examples}}
		{Name: {{printf "%q" .Name}}, F: p.{{.Name}}},
{{end}}
	}
	m := testing.MainStart(match, tests, benchmarks, examples)
{{with .Main}}
	p.{{.Name}}(m)
{{else}}
	os.Exit(m.Run())
{{end}}
}

`))
+
// examplesOnlyTmpl is used when the tested package does not import
// "testing": the Examples are simply called from main, which ensures
// they don't panic but does not check their "Output:" comments.
var examplesOnlyTmpl = template.Must(template.New("examples").Parse(`
package main

import p {{printf "%q" .Pkg.Pkg.Path}}

func main() {
{{range .Examples}}
	p.{{.Name}}()
{{end}}
}
`))
diff --git a/vendor/honnef.co/go/tools/ssa/util.go b/vendor/honnef.co/go/tools/ssa/util.go
new file mode 100644
index 0000000..317a013
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/util.go
@@ -0,0 +1,121 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// This file defines a number of miscellaneous utility functions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "io"
+ "os"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+//// AST utilities
+
// unparen returns e with any enclosing parentheses stripped
// (delegates to astutil.Unparen).
func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }
+
+// isBlankIdent returns true iff e is an Ident with name "_".
+// They have no associated types.Object, and thus no type.
+//
+func isBlankIdent(e ast.Expr) bool {
+ id, ok := e.(*ast.Ident)
+ return ok && id.Name == "_"
+}
+
+//// Type utilities. Some of these belong in go/types.
+
+// isPointer returns true for types whose underlying type is a pointer.
+func isPointer(typ types.Type) bool {
+ _, ok := typ.Underlying().(*types.Pointer)
+ return ok
+}
+
+func isInterface(T types.Type) bool { return types.IsInterface(T) }
+
+// deref returns a pointer's element type; otherwise it returns typ.
+func deref(typ types.Type) types.Type {
+ if p, ok := typ.Underlying().(*types.Pointer); ok {
+ return p.Elem()
+ }
+ return typ
+}
+
+// recvType returns the receiver type of method obj.
+func recvType(obj *types.Func) types.Type {
+ return obj.Type().(*types.Signature).Recv().Type()
+}
+
+// DefaultType returns the default "typed" type for an "untyped" type;
+// it returns the incoming type for all other types. The default type
+// for untyped nil is untyped nil.
+//
+// Exported to ssa/interp.
+//
+// TODO(gri): this is a copy of go/types.defaultType; export that function.
+//
+func DefaultType(typ types.Type) types.Type {
+ if t, ok := typ.(*types.Basic); ok {
+ k := t.Kind()
+ switch k {
+ case types.UntypedBool:
+ k = types.Bool
+ case types.UntypedInt:
+ k = types.Int
+ case types.UntypedRune:
+ k = types.Rune
+ case types.UntypedFloat:
+ k = types.Float64
+ case types.UntypedComplex:
+ k = types.Complex128
+ case types.UntypedString:
+ k = types.String
+ }
+ typ = types.Typ[k]
+ }
+ return typ
+}
+
+// logStack prints the formatted "start" message to stderr and
+// returns a closure that prints the corresponding "end" message.
+// Call using 'defer logStack(...)()' to show builder stack on panic.
+// Don't forget trailing parens!
+//
func logStack(format string, args ...interface{}) func() {
	msg := fmt.Sprintf(format, args...)
	// Print the "start" message immediately; the returned closure
	// prints the matching "end" message.
	io.WriteString(os.Stderr, msg+"\n")
	return func() {
		io.WriteString(os.Stderr, msg+" end\n")
	}
}
+
+// newVar creates a 'var' for use in a types.Tuple.
+func newVar(name string, typ types.Type) *types.Var {
+ return types.NewParam(token.NoPos, nil, name, typ)
+}
+
+// anonVar creates an anonymous 'var' for use in a types.Tuple.
+func anonVar(typ types.Type) *types.Var {
+ return newVar("", typ)
+}
+
// lenResults is the single-int result tuple shared by every len
// builtin created by makeLen.
var lenResults = types.NewTuple(anonVar(tInt))

// makeLen returns the len builtin specialized to type func(T)int.
func makeLen(T types.Type) *Builtin {
	lenParams := types.NewTuple(anonVar(T))
	return &Builtin{
		name: "len",
		sig:  types.NewSignature(nil, lenParams, lenResults, false),
	}
}
diff --git a/vendor/honnef.co/go/tools/ssa/wrappers.go b/vendor/honnef.co/go/tools/ssa/wrappers.go
new file mode 100644
index 0000000..6ca01ab
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/wrappers.go
@@ -0,0 +1,296 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.5
+
+package ssa
+
+// This file defines synthesis of Functions that delegate to declared
+// methods; they come in three kinds:
+//
+// (1) wrappers: methods that wrap declared methods, performing
+// implicit pointer indirections and embedded field selections.
+//
+// (2) thunks: funcs that wrap declared methods. Like wrappers,
+// thunks perform indirections and field selections. The thunk's
+// first parameter is used as the receiver for the method call.
+//
+// (3) bounds: funcs that wrap declared methods. The bound's sole
+// free variable, supplied by a closure, is used as the receiver
+// for the method call. No indirections or field selections are
+// performed since they can be done before the call.
+
+import (
+ "fmt"
+
+ "go/types"
+)
+
+// -- wrappers -----------------------------------------------------------
+
+// makeWrapper returns a synthetic method that delegates to the
+// declared method denoted by meth.Obj(), first performing any
+// necessary pointer indirections or field selections implied by meth.
+//
+// The resulting method's receiver type is meth.Recv().
+//
+// This function is versatile but quite subtle! Consider the
+// following axes of variation when making changes:
+// - optional receiver indirection
+// - optional implicit field selections
+// - meth.Obj() may denote a concrete or an interface method
+// - the result may be a thunk or a wrapper.
+//
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
+//
func makeWrapper(prog *Program, sel *types.Selection) *Function {
	obj := sel.Obj().(*types.Func)       // the declared function
	sig := sel.Type().(*types.Signature) // type of this wrapper

	var recv *types.Var // wrapper's receiver or thunk's params[0]
	name := obj.Name()
	var description string
	var start int // first regular param
	if sel.Kind() == types.MethodExpr {
		// Method expression T.f: the receiver becomes the first
		// ordinary parameter of the thunk.
		name += "$thunk"
		description = "thunk"
		recv = sig.Params().At(0)
		start = 1
	} else {
		description = "wrapper"
		recv = sig.Recv()
	}

	description = fmt.Sprintf("%s for %s", description, sel.Obj())
	if prog.mode&LogSource != 0 {
		defer logStack("make %s to (%s)", description, recv.Type())()
	}
	fn := &Function{
		name:      name,
		method:    sel,
		object:    obj,
		Signature: sig,
		Synthetic: description,
		Prog:      prog,
		pos:       obj.Pos(),
	}
	fn.startBody()
	fn.addSpilledParam(recv)
	createParams(fn, start)

	// indices is the field path of the selection; all but its last
	// element denote implicit embedded-field selections (see below).
	indices := sel.Index()

	var v Value = fn.Locals[0] // spilled receiver
	if isPointer(sel.Recv()) {
		v = emitLoad(fn, v)

		// For simple indirection wrappers, perform an informative nil-check:
		// "value method (T).f called using nil *T pointer"
		if len(indices) == 1 && !isPointer(recvType(obj)) {
			var c Call
			c.Call.Value = &Builtin{
				name: "ssa:wrapnilchk",
				sig: types.NewSignature(nil,
					types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)),
					types.NewTuple(anonVar(sel.Recv())), false),
			}
			c.Call.Args = []Value{
				v,
				stringConst(deref(sel.Recv()).String()),
				stringConst(sel.Obj().Name()),
			}
			c.setType(v.Type())
			v = fn.emit(&c)
		}
	}

	// Invariant: v is a pointer, either
	//    value of *A receiver param, or
	// address of  A spilled receiver.

	// We use pointer arithmetic (FieldAddr possibly followed by
	// Load) in preference to value extraction (Field possibly
	// preceded by Load).

	v = emitImplicitSelections(fn, v, indices[:len(indices)-1])

	// Invariant: v is a pointer, either
	//    value of implicit *C field, or
	// address of implicit  C field.

	var c Call
	if r := recvType(obj); !isInterface(r) { // concrete method
		if !isPointer(r) {
			v = emitLoad(fn, v)
		}
		c.Call.Value = prog.declaredFunc(obj)
		c.Call.Args = append(c.Call.Args, v)
	} else {
		// Interface method: dynamic dispatch on the loaded value.
		c.Call.Method = obj
		c.Call.Value = emitLoad(fn, v)
	}
	for _, arg := range fn.Params[1:] {
		c.Call.Args = append(c.Call.Args, arg)
	}
	emitTailCall(fn, &c)
	fn.finishBody()
	return fn
}
+
+// createParams creates parameters for wrapper method fn based on its
+// Signature.Params, which do not include the receiver.
+// start is the index of the first regular parameter to use.
+//
+func createParams(fn *Function, start int) {
+ var last *Parameter
+ tparams := fn.Signature.Params()
+ for i, n := start, tparams.Len(); i < n; i++ {
+ last = fn.addParamObj(tparams.At(i))
+ }
+ if fn.Signature.Variadic() {
+ last.typ = types.NewSlice(last.typ)
+ }
+}
+
+// -- bounds -----------------------------------------------------------
+
+// makeBound returns a bound method wrapper (or "bound"), a synthetic
+// function that delegates to a concrete or interface method denoted
+// by obj. The resulting function has no receiver, but has one free
+// variable which will be used as the method's receiver in the
+// tail-call.
+//
+// Use MakeClosure with such a wrapper to construct a bound method
+// closure. e.g.:
+//
+// type T int or: type T interface { meth() }
+// func (t T) meth()
+// var t T
+// f := t.meth
+// f() // calls t.meth()
+//
+// f is a closure of a synthetic wrapper defined as if by:
+//
+// f := func() { return t.meth() }
+//
+// Unlike makeWrapper, makeBound need perform no indirection or field
+// selections because that can be done before the closure is
+// constructed.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu)
+//
func makeBound(prog *Program, obj *types.Func) *Function {
	// Bound-method wrappers are memoized in prog.bounds, keyed by the
	// method object, under methodsMu.
	prog.methodsMu.Lock()
	defer prog.methodsMu.Unlock()
	fn, ok := prog.bounds[obj]
	if !ok {
		description := fmt.Sprintf("bound method wrapper for %s", obj)
		if prog.mode&LogSource != 0 {
			defer logStack("%s", description)()
		}
		fn = &Function{
			name:      obj.Name() + "$bound",
			object:    obj,
			Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver
			Synthetic: description,
			Prog:      prog,
			pos:       obj.Pos(),
		}

		// The sole free variable carries the receiver for the tail-call.
		fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn}
		fn.FreeVars = []*FreeVar{fv}
		fn.startBody()
		createParams(fn, 0)
		var c Call

		if !isInterface(recvType(obj)) { // concrete
			c.Call.Value = prog.declaredFunc(obj)
			c.Call.Args = []Value{fv}
		} else {
			c.Call.Value = fv
			c.Call.Method = obj
		}
		for _, arg := range fn.Params {
			c.Call.Args = append(c.Call.Args, arg)
		}
		emitTailCall(fn, &c)
		fn.finishBody()

		prog.bounds[obj] = fn
	}
	return fn
}
+
+// -- thunks -----------------------------------------------------------
+
+// makeThunk returns a thunk, a synthetic function that delegates to a
+// concrete or interface method denoted by sel.Obj(). The resulting
+// function has no receiver, but has an additional (first) regular
+// parameter.
+//
+// Precondition: sel.Kind() == types.MethodExpr.
+//
+// type T int or: type T interface { meth() }
+// func (t T) meth()
+// f := T.meth
+// var t T
+// f(t) // calls t.meth()
+//
+// f is a synthetic wrapper defined as if by:
+//
+// f := func(t T) { return t.meth() }
+//
+// TODO(adonovan): opt: currently the stub is created even when used
+// directly in a function call: C.f(i, 0). This is less efficient
+// than inlining the stub.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu)
+//
func makeThunk(prog *Program, sel *types.Selection) *Function {
	if sel.Kind() != types.MethodExpr {
		panic(sel)
	}

	// Thunks are memoized in prog.thunks, keyed by the selection's
	// identity (selectionKey is a comparable stand-in for *Selection).
	key := selectionKey{
		kind:     sel.Kind(),
		recv:     sel.Recv(),
		obj:      sel.Obj(),
		index:    fmt.Sprint(sel.Index()),
		indirect: sel.Indirect(),
	}

	prog.methodsMu.Lock()
	defer prog.methodsMu.Unlock()

	// Canonicalize key.recv to avoid constructing duplicate thunks.
	canonRecv, ok := prog.canon.At(key.recv).(types.Type)
	if !ok {
		canonRecv = key.recv
		prog.canon.Set(key.recv, canonRecv)
	}
	key.recv = canonRecv

	fn, ok := prog.thunks[key]
	if !ok {
		fn = makeWrapper(prog, sel)
		if fn.Signature.Recv() != nil {
			panic(fn) // unexpected receiver
		}
		prog.thunks[key] = fn
	}
	return fn
}
+
+func changeRecv(s *types.Signature, recv *types.Var) *types.Signature {
+ return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic())
+}
+
// selectionKey is like types.Selection but a usable map key.
type selectionKey struct {
	kind     types.SelectionKind
	recv     types.Type // canonicalized via Program.canon
	obj      types.Object
	index    string // Selection.Index() rendered as a string ([]int is not comparable)
	indirect bool
}
diff --git a/vendor/honnef.co/go/tools/ssa/write.go b/vendor/honnef.co/go/tools/ssa/write.go
new file mode 100644
index 0000000..89761a1
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/write.go
@@ -0,0 +1,5 @@
+package ssa
+
// NewJump returns a new Jump instruction whose parent block is parent.
func NewJump(parent *BasicBlock) *Jump {
	return &Jump{anInstruction{parent}}
}
diff --git a/vendor/honnef.co/go/tools/staticcheck/buildtag.go b/vendor/honnef.co/go/tools/staticcheck/buildtag.go
new file mode 100644
index 0000000..27c31c0
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/buildtag.go
@@ -0,0 +1,21 @@
+package staticcheck
+
+import (
+ "go/ast"
+ "strings"
+
+ "honnef.co/go/tools/lint"
+)
+
+func buildTags(f *ast.File) [][]string {
+ var out [][]string
+ for _, line := range strings.Split(lint.Preamble(f), "\n") {
+ if !strings.HasPrefix(line, "+build ") {
+ continue
+ }
+ line = strings.TrimSpace(strings.TrimPrefix(line, "+build "))
+ fields := strings.Fields(line)
+ out = append(out, fields)
+ }
+ return out
+}
diff --git a/vendor/honnef.co/go/tools/staticcheck/lint.go b/vendor/honnef.co/go/tools/staticcheck/lint.go
new file mode 100644
index 0000000..fdc395e
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/lint.go
@@ -0,0 +1,2738 @@
+// Package staticcheck contains a linter for Go source code.
+package staticcheck // import "honnef.co/go/tools/staticcheck"
+
+import (
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ htmltemplate "html/template"
+ "net/http"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ texttemplate "text/template"
+
+ "honnef.co/go/tools/deprecated"
+ "honnef.co/go/tools/functions"
+ "honnef.co/go/tools/internal/sharedcheck"
+ "honnef.co/go/tools/lint"
+ "honnef.co/go/tools/ssa"
+ "honnef.co/go/tools/staticcheck/vrp"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+func validRegexp(call *Call) {
+ arg := call.Args[0]
+ err := ValidateRegexp(arg.Value)
+ if err != nil {
+ arg.Invalid(err.Error())
+ }
+}
+
+type runeSlice []rune
+
+func (rs runeSlice) Len() int { return len(rs) }
+func (rs runeSlice) Less(i int, j int) bool { return rs[i] < rs[j] }
+func (rs runeSlice) Swap(i int, j int) { rs[i], rs[j] = rs[j], rs[i] }
+
+func utf8Cutset(call *Call) {
+ arg := call.Args[1]
+ if InvalidUTF8(arg.Value) {
+ arg.Invalid(MsgInvalidUTF8)
+ }
+}
+
+func uniqueCutset(call *Call) {
+ arg := call.Args[1]
+ if !UniqueStringCutset(arg.Value) {
+ arg.Invalid(MsgNonUniqueCutset)
+ }
+}
+
+func unmarshalPointer(name string, arg int) CallCheck {
+ return func(call *Call) {
+ if !Pointer(call.Args[arg].Value) {
+ call.Args[arg].Invalid(fmt.Sprintf("%s expects to unmarshal into a pointer, but the provided value is not a pointer", name))
+ }
+ }
+}
+
+func pointlessIntMath(call *Call) {
+ if ConvertedFromInt(call.Args[0].Value) {
+ call.Invalid(fmt.Sprintf("calling %s on a converted integer is pointless", lint.CallName(call.Instr.Common())))
+ }
+}
+
+func checkValidHostPort(arg int) CallCheck {
+ return func(call *Call) {
+ if !ValidHostPort(call.Args[arg].Value) {
+ call.Args[arg].Invalid(MsgInvalidHostPort)
+ }
+ }
+}
+
+var (
+ checkRegexpRules = map[string]CallCheck{
+ "regexp.MustCompile": validRegexp,
+ "regexp.Compile": validRegexp,
+ }
+
+ checkTimeParseRules = map[string]CallCheck{
+ "time.Parse": func(call *Call) {
+ arg := call.Args[0]
+ err := ValidateTimeLayout(arg.Value)
+ if err != nil {
+ arg.Invalid(err.Error())
+ }
+ },
+ }
+
+ checkEncodingBinaryRules = map[string]CallCheck{
+ "encoding/binary.Write": func(call *Call) {
+ arg := call.Args[2]
+ if !CanBinaryMarshal(call.Job, arg.Value) {
+ arg.Invalid(fmt.Sprintf("value of type %s cannot be used with binary.Write", arg.Value.Value.Type()))
+ }
+ },
+ }
+
+ checkURLsRules = map[string]CallCheck{
+ "net/url.Parse": func(call *Call) {
+ arg := call.Args[0]
+ err := ValidateURL(arg.Value)
+ if err != nil {
+ arg.Invalid(err.Error())
+ }
+ },
+ }
+
+ checkSyncPoolValueRules = map[string]CallCheck{
+ "(*sync.Pool).Put": func(call *Call) {
+ arg := call.Args[0]
+ typ := arg.Value.Value.Type()
+ if !lint.IsPointerLike(typ) {
+ arg.Invalid("argument should be pointer-like to avoid allocations")
+ }
+ },
+ }
+
+ checkRegexpFindAllRules = map[string]CallCheck{
+ "(*regexp.Regexp).FindAll": RepeatZeroTimes("a FindAll method", 1),
+ "(*regexp.Regexp).FindAllIndex": RepeatZeroTimes("a FindAll method", 1),
+ "(*regexp.Regexp).FindAllString": RepeatZeroTimes("a FindAll method", 1),
+ "(*regexp.Regexp).FindAllStringIndex": RepeatZeroTimes("a FindAll method", 1),
+ "(*regexp.Regexp).FindAllStringSubmatch": RepeatZeroTimes("a FindAll method", 1),
+ "(*regexp.Regexp).FindAllStringSubmatchIndex": RepeatZeroTimes("a FindAll method", 1),
+ "(*regexp.Regexp).FindAllSubmatch": RepeatZeroTimes("a FindAll method", 1),
+ "(*regexp.Regexp).FindAllSubmatchIndex": RepeatZeroTimes("a FindAll method", 1),
+ }
+
+ checkUTF8CutsetRules = map[string]CallCheck{
+ "strings.IndexAny": utf8Cutset,
+ "strings.LastIndexAny": utf8Cutset,
+ "strings.ContainsAny": utf8Cutset,
+ "strings.Trim": utf8Cutset,
+ "strings.TrimLeft": utf8Cutset,
+ "strings.TrimRight": utf8Cutset,
+ }
+
+ checkUniqueCutsetRules = map[string]CallCheck{
+ "strings.Trim": uniqueCutset,
+ "strings.TrimLeft": uniqueCutset,
+ "strings.TrimRight": uniqueCutset,
+ }
+
+ checkUnmarshalPointerRules = map[string]CallCheck{
+ "encoding/xml.Unmarshal": unmarshalPointer("xml.Unmarshal", 1),
+ "(*encoding/xml.Decoder).Decode": unmarshalPointer("Decode", 0),
+ "encoding/json.Unmarshal": unmarshalPointer("json.Unmarshal", 1),
+ "(*encoding/json.Decoder).Decode": unmarshalPointer("Decode", 0),
+ }
+
+ checkUnbufferedSignalChanRules = map[string]CallCheck{
+ "os/signal.Notify": func(call *Call) {
+ arg := call.Args[0]
+ if UnbufferedChannel(arg.Value) {
+ arg.Invalid("the channel used with signal.Notify should be buffered")
+ }
+ },
+ }
+
+ checkMathIntRules = map[string]CallCheck{
+ "math.Ceil": pointlessIntMath,
+ "math.Floor": pointlessIntMath,
+ "math.IsNaN": pointlessIntMath,
+ "math.Trunc": pointlessIntMath,
+ "math.IsInf": pointlessIntMath,
+ }
+
+ checkStringsReplaceZeroRules = map[string]CallCheck{
+ "strings.Replace": RepeatZeroTimes("strings.Replace", 3),
+ "bytes.Replace": RepeatZeroTimes("bytes.Replace", 3),
+ }
+
+ checkListenAddressRules = map[string]CallCheck{
+ "net/http.ListenAndServe": checkValidHostPort(0),
+ "net/http.ListenAndServeTLS": checkValidHostPort(0),
+ }
+
+ checkBytesEqualIPRules = map[string]CallCheck{
+ "bytes.Equal": func(call *Call) {
+ if ConvertedFrom(call.Args[0].Value, "net.IP") && ConvertedFrom(call.Args[1].Value, "net.IP") {
+ call.Invalid("use net.IP.Equal to compare net.IPs, not bytes.Equal")
+ }
+ },
+ }
+
+ checkRegexpMatchLoopRules = map[string]CallCheck{
+ "regexp.Match": loopedRegexp("regexp.Match"),
+ "regexp.MatchReader": loopedRegexp("regexp.MatchReader"),
+ "regexp.MatchString": loopedRegexp("regexp.MatchString"),
+ }
+)
+
+type Checker struct {
+ CheckGenerated bool
+ funcDescs *functions.Descriptions
+ deprecatedObjs map[types.Object]string
+ nodeFns map[ast.Node]*ssa.Function
+}
+
+func NewChecker() *Checker {
+ return &Checker{}
+}
+
+func (*Checker) Name() string { return "staticcheck" }
+func (*Checker) Prefix() string { return "SA" }
+
+func (c *Checker) Funcs() map[string]lint.Func {
+ return map[string]lint.Func{
+ "SA1000": c.callChecker(checkRegexpRules),
+ "SA1001": c.CheckTemplate,
+ "SA1002": c.callChecker(checkTimeParseRules),
+ "SA1003": c.callChecker(checkEncodingBinaryRules),
+ "SA1004": c.CheckTimeSleepConstant,
+ "SA1005": c.CheckExec,
+ "SA1006": c.CheckUnsafePrintf,
+ "SA1007": c.callChecker(checkURLsRules),
+ "SA1008": c.CheckCanonicalHeaderKey,
+ "SA1009": nil,
+ "SA1010": c.callChecker(checkRegexpFindAllRules),
+ "SA1011": c.callChecker(checkUTF8CutsetRules),
+ "SA1012": c.CheckNilContext,
+ "SA1013": c.CheckSeeker,
+ "SA1014": c.callChecker(checkUnmarshalPointerRules),
+ "SA1015": c.CheckLeakyTimeTick,
+ "SA1016": c.CheckUntrappableSignal,
+ "SA1017": c.callChecker(checkUnbufferedSignalChanRules),
+ "SA1018": c.callChecker(checkStringsReplaceZeroRules),
+ "SA1019": c.CheckDeprecated,
+ "SA1020": c.callChecker(checkListenAddressRules),
+ "SA1021": c.callChecker(checkBytesEqualIPRules),
+ "SA1022": nil,
+ "SA1023": c.CheckWriterBufferModified,
+ "SA1024": c.callChecker(checkUniqueCutsetRules),
+
+ "SA2000": c.CheckWaitgroupAdd,
+ "SA2001": c.CheckEmptyCriticalSection,
+ "SA2002": c.CheckConcurrentTesting,
+ "SA2003": c.CheckDeferLock,
+
+ "SA3000": c.CheckTestMainExit,
+ "SA3001": c.CheckBenchmarkN,
+
+ "SA4000": c.CheckLhsRhsIdentical,
+ "SA4001": c.CheckIneffectiveCopy,
+ "SA4002": c.CheckDiffSizeComparison,
+ "SA4003": c.CheckUnsignedComparison,
+ "SA4004": c.CheckIneffectiveLoop,
+ "SA4005": nil,
+ "SA4006": c.CheckUnreadVariableValues,
+ // "SA4007": c.CheckPredeterminedBooleanExprs,
+ "SA4007": nil,
+ "SA4008": c.CheckLoopCondition,
+ "SA4009": c.CheckArgOverwritten,
+ "SA4010": c.CheckIneffectiveAppend,
+ "SA4011": c.CheckScopedBreak,
+ "SA4012": c.CheckNaNComparison,
+ "SA4013": c.CheckDoubleNegation,
+ "SA4014": c.CheckRepeatedIfElse,
+ "SA4015": c.callChecker(checkMathIntRules),
+ "SA4016": c.CheckSillyBitwiseOps,
+ "SA4017": c.CheckPureFunctions,
+ "SA4018": c.CheckSelfAssignment,
+ "SA4019": c.CheckDuplicateBuildConstraints,
+
+ "SA5000": c.CheckNilMaps,
+ "SA5001": c.CheckEarlyDefer,
+ "SA5002": c.CheckInfiniteEmptyLoop,
+ "SA5003": c.CheckDeferInInfiniteLoop,
+ "SA5004": c.CheckLoopEmptyDefault,
+ "SA5005": c.CheckCyclicFinalizer,
+ // "SA5006": c.CheckSliceOutOfBounds,
+ "SA5007": c.CheckInfiniteRecursion,
+
+ "SA6000": c.callChecker(checkRegexpMatchLoopRules),
+ "SA6001": c.CheckMapBytesKey,
+ "SA6002": c.callChecker(checkSyncPoolValueRules),
+ "SA6003": c.CheckRangeStringRunes,
+ "SA6004": nil,
+
+ "SA9000": nil,
+ "SA9001": c.CheckDubiousDeferInChannelRangeLoop,
+ "SA9002": c.CheckNonOctalFileMode,
+ "SA9003": c.CheckEmptyBranch,
+ }
+}
+
+func (c *Checker) filterGenerated(files []*ast.File) []*ast.File {
+ if c.CheckGenerated {
+ return files
+ }
+ var out []*ast.File
+ for _, f := range files {
+ if !lint.IsGenerated(f) {
+ out = append(out, f)
+ }
+ }
+ return out
+}
+
+func (c *Checker) deprecateObject(m map[types.Object]string, prog *lint.Program, obj types.Object) {
+ if obj.Pkg() == nil {
+ return
+ }
+
+ f := prog.File(obj)
+ if f == nil {
+ return
+ }
+ msg := c.deprecationMessage(f, prog.Prog.Fset, obj)
+ if msg != "" {
+ m[obj] = msg
+ }
+}
+
+func (c *Checker) Init(prog *lint.Program) {
+ wg := &sync.WaitGroup{}
+ wg.Add(3)
+ go func() {
+ c.funcDescs = functions.NewDescriptions(prog.SSA)
+ for _, fn := range prog.AllFunctions {
+ if fn.Blocks != nil {
+ applyStdlibKnowledge(fn)
+ ssa.OptimizeBlocks(fn)
+ }
+ }
+ wg.Done()
+ }()
+
+ go func() {
+ c.nodeFns = lint.NodeFns(prog.Packages)
+ wg.Done()
+ }()
+
+ go func() {
+ c.deprecatedObjs = map[types.Object]string{}
+ for _, ssapkg := range prog.SSA.AllPackages() {
+ ssapkg := ssapkg
+ for _, member := range ssapkg.Members {
+ obj := member.Object()
+ if obj == nil {
+ continue
+ }
+ c.deprecateObject(c.deprecatedObjs, prog, obj)
+ if typ, ok := obj.Type().(*types.Named); ok {
+ for i := 0; i < typ.NumMethods(); i++ {
+ meth := typ.Method(i)
+ c.deprecateObject(c.deprecatedObjs, prog, meth)
+ }
+
+ if iface, ok := typ.Underlying().(*types.Interface); ok {
+ for i := 0; i < iface.NumExplicitMethods(); i++ {
+ meth := iface.ExplicitMethod(i)
+ c.deprecateObject(c.deprecatedObjs, prog, meth)
+ }
+ }
+ }
+ if typ, ok := obj.Type().Underlying().(*types.Struct); ok {
+ n := typ.NumFields()
+ for i := 0; i < n; i++ {
+ // FIXME(dh): This code will not find deprecated
+ // fields in anonymous structs.
+ field := typ.Field(i)
+ c.deprecateObject(c.deprecatedObjs, prog, field)
+ }
+ }
+ }
+ }
+ wg.Done()
+ }()
+
+ wg.Wait()
+}
+
+func (c *Checker) deprecationMessage(file *ast.File, fset *token.FileSet, obj types.Object) (message string) {
+ pos := obj.Pos()
+ path, _ := astutil.PathEnclosingInterval(file, pos, pos)
+ if len(path) <= 2 {
+ return ""
+ }
+ var docs []*ast.CommentGroup
+ switch n := path[1].(type) {
+ case *ast.FuncDecl:
+ docs = append(docs, n.Doc)
+ case *ast.Field:
+ docs = append(docs, n.Doc)
+ case *ast.ValueSpec:
+ docs = append(docs, n.Doc)
+ if len(path) >= 3 {
+ if n, ok := path[2].(*ast.GenDecl); ok {
+ docs = append(docs, n.Doc)
+ }
+ }
+ case *ast.TypeSpec:
+ docs = append(docs, n.Doc)
+ if len(path) >= 3 {
+ if n, ok := path[2].(*ast.GenDecl); ok {
+ docs = append(docs, n.Doc)
+ }
+ }
+ default:
+ return ""
+ }
+
+ for _, doc := range docs {
+ if doc == nil {
+ continue
+ }
+ parts := strings.Split(doc.Text(), "\n\n")
+ last := parts[len(parts)-1]
+ if !strings.HasPrefix(last, "Deprecated: ") {
+ continue
+ }
+ alt := last[len("Deprecated: "):]
+ alt = strings.Replace(alt, "\n", " ", -1)
+ return alt
+ }
+ return ""
+}
+
+func (c *Checker) isInLoop(b *ssa.BasicBlock) bool {
+ sets := c.funcDescs.Get(b.Parent()).Loops
+ for _, set := range sets {
+ if set[b] {
+ return true
+ }
+ }
+ return false
+}
+
+func applyStdlibKnowledge(fn *ssa.Function) {
+ if len(fn.Blocks) == 0 {
+ return
+ }
+
+ // comma-ok receiving from a time.Tick channel will never return
+ // ok == false, so any branching on the value of ok can be
+ // replaced with an unconditional jump. This will primarily match
+ // `for range time.Tick(x)` loops, but it can also match
+ // user-written code.
+ for _, block := range fn.Blocks {
+ if len(block.Instrs) < 3 {
+ continue
+ }
+ if len(block.Succs) != 2 {
+ continue
+ }
+ var instrs []*ssa.Instruction
+ for i, ins := range block.Instrs {
+ if _, ok := ins.(*ssa.DebugRef); ok {
+ continue
+ }
+ instrs = append(instrs, &block.Instrs[i])
+ }
+
+ for i, ins := range instrs {
+ unop, ok := (*ins).(*ssa.UnOp)
+ if !ok || unop.Op != token.ARROW {
+ continue
+ }
+ call, ok := unop.X.(*ssa.Call)
+ if !ok {
+ continue
+ }
+ if !lint.IsCallTo(call.Common(), "time.Tick") {
+ continue
+ }
+ ex, ok := (*instrs[i+1]).(*ssa.Extract)
+ if !ok || ex.Tuple != unop || ex.Index != 1 {
+ continue
+ }
+
+ ifstmt, ok := (*instrs[i+2]).(*ssa.If)
+ if !ok || ifstmt.Cond != ex {
+ continue
+ }
+
+ *instrs[i+2] = ssa.NewJump(block)
+ succ := block.Succs[1]
+ block.Succs = block.Succs[0:1]
+ succ.RemovePred(block)
+ }
+ }
+}
+
+func hasType(j *lint.Job, expr ast.Expr, name string) bool {
+ return types.TypeString(j.Program.Info.TypeOf(expr), nil) == name
+}
+
+func (c *Checker) CheckUntrappableSignal(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if !j.IsCallToAnyAST(call,
+ "os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") {
+ return true
+ }
+ for _, arg := range call.Args {
+ if conv, ok := arg.(*ast.CallExpr); ok && isName(j, conv.Fun, "os.Signal") {
+ arg = conv.Args[0]
+ }
+
+ if isName(j, arg, "os.Kill") || isName(j, arg, "syscall.SIGKILL") {
+ j.Errorf(arg, "%s cannot be trapped (did you mean syscall.SIGTERM?)", j.Render(arg))
+ }
+ if isName(j, arg, "syscall.SIGSTOP") {
+ j.Errorf(arg, "%s signal cannot be trapped", j.Render(arg))
+ }
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckTemplate(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ var kind string
+ if j.IsCallToAST(call, "(*text/template.Template).Parse") {
+ kind = "text"
+ } else if j.IsCallToAST(call, "(*html/template.Template).Parse") {
+ kind = "html"
+ } else {
+ return true
+ }
+ sel := call.Fun.(*ast.SelectorExpr)
+ if !j.IsCallToAST(sel.X, "text/template.New") &&
+ !j.IsCallToAST(sel.X, "html/template.New") {
+ // TODO(dh): this is a cheap workaround for templates with
+ // different delims. A better solution with less false
+ // negatives would use data flow analysis to see where the
+ // template comes from and where it has been
+ return true
+ }
+ s, ok := j.ExprToString(call.Args[0])
+ if !ok {
+ return true
+ }
+ var err error
+ switch kind {
+ case "text":
+ _, err = texttemplate.New("").Parse(s)
+ case "html":
+ _, err = htmltemplate.New("").Parse(s)
+ }
+ if err != nil {
+ // TODO(dominikh): whitelist other parse errors, if any
+ if strings.Contains(err.Error(), "unexpected") {
+ j.Errorf(call.Args[0], "%s", err)
+ }
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckTimeSleepConstant(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if !j.IsCallToAST(call, "time.Sleep") {
+ return true
+ }
+ lit, ok := call.Args[0].(*ast.BasicLit)
+ if !ok {
+ return true
+ }
+ n, err := strconv.Atoi(lit.Value)
+ if err != nil {
+ return true
+ }
+ if n == 0 || n > 120 {
+ // time.Sleep(0) is a seldomly used pattern in concurrency
+ // tests. >120 might be intentional. 120 was chosen
+ // because the user could've meant 2 minutes.
+ return true
+ }
+ recommendation := "time.Sleep(time.Nanosecond)"
+ if n != 1 {
+ recommendation = fmt.Sprintf("time.Sleep(%d * time.Nanosecond)", n)
+ }
+ j.Errorf(call.Args[0], "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation)
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckWaitgroupAdd(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ g, ok := node.(*ast.GoStmt)
+ if !ok {
+ return true
+ }
+ fun, ok := g.Call.Fun.(*ast.FuncLit)
+ if !ok {
+ return true
+ }
+ if len(fun.Body.List) == 0 {
+ return true
+ }
+ stmt, ok := fun.Body.List[0].(*ast.ExprStmt)
+ if !ok {
+ return true
+ }
+ call, ok := stmt.X.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return true
+ }
+ fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func)
+ if !ok {
+ return true
+ }
+ if fn.FullName() == "(*sync.WaitGroup).Add" {
+ j.Errorf(sel, "should call %s before starting the goroutine to avoid a race",
+ j.Render(stmt))
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ loop, ok := node.(*ast.ForStmt)
+ if !ok || len(loop.Body.List) != 0 || loop.Post != nil {
+ return true
+ }
+
+ if loop.Init != nil {
+ // TODO(dh): this isn't strictly necessary, it just makes
+ // the check easier.
+ return true
+ }
+ // An empty loop is bad news in two cases: 1) The loop has no
+ // condition. In that case, it's just a loop that spins
+ // forever and as fast as it can, keeping a core busy. 2) The
+ // loop condition only consists of variable or field reads and
+ // operators on those. The only way those could change their
+ // value is with unsynchronised access, which constitutes a
+ // data race.
+ //
+ // If the condition contains any function calls, its behaviour
+ // is dynamic and the loop might terminate. Similarly for
+ // channel receives.
+
+ if loop.Cond != nil && hasSideEffects(loop.Cond) {
+ return true
+ }
+
+ j.Errorf(loop, "this loop will spin, using 100%% CPU")
+ if loop.Cond != nil {
+ j.Errorf(loop, "loop condition never changes or has a race condition")
+ }
+
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ mightExit := false
+ var defers []ast.Stmt
+ loop, ok := node.(*ast.ForStmt)
+ if !ok || loop.Cond != nil {
+ return true
+ }
+ fn2 := func(node ast.Node) bool {
+ switch stmt := node.(type) {
+ case *ast.ReturnStmt:
+ mightExit = true
+ case *ast.BranchStmt:
+ // TODO(dominikh): if this sees a break in a switch or
+ // select, it doesn't check if it breaks the loop or
+ // just the select/switch. This causes some false
+ // negatives.
+ if stmt.Tok == token.BREAK {
+ mightExit = true
+ }
+ case *ast.DeferStmt:
+ defers = append(defers, stmt)
+ case *ast.FuncLit:
+ // Don't look into function bodies
+ return false
+ }
+ return true
+ }
+ ast.Inspect(loop.Body, fn2)
+ if mightExit {
+ return true
+ }
+ for _, stmt := range defers {
+ j.Errorf(stmt, "defers in this infinite loop will never run")
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ loop, ok := node.(*ast.RangeStmt)
+ if !ok {
+ return true
+ }
+ typ := j.Program.Info.TypeOf(loop.X)
+ _, ok = typ.Underlying().(*types.Chan)
+ if !ok {
+ return true
+ }
+ fn2 := func(node ast.Node) bool {
+ switch stmt := node.(type) {
+ case *ast.DeferStmt:
+ j.Errorf(stmt, "defers in this range loop won't run unless the channel gets closed")
+ case *ast.FuncLit:
+ // Don't look into function bodies
+ return false
+ }
+ return true
+ }
+ ast.Inspect(loop.Body, fn2)
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckTestMainExit(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ if !isTestMain(j, node) {
+ return true
+ }
+
+ arg := j.Program.Info.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0])
+ callsRun := false
+ fn2 := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return true
+ }
+ ident, ok := sel.X.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ if arg != j.Program.Info.ObjectOf(ident) {
+ return true
+ }
+ if sel.Sel.Name == "Run" {
+ callsRun = true
+ return false
+ }
+ return true
+ }
+ ast.Inspect(node.(*ast.FuncDecl).Body, fn2)
+
+ callsExit := false
+ fn3 := func(node ast.Node) bool {
+ if j.IsCallToAST(node, "os.Exit") {
+ callsExit = true
+ return false
+ }
+ return true
+ }
+ ast.Inspect(node.(*ast.FuncDecl).Body, fn3)
+ if !callsExit && callsRun {
+ j.Errorf(node, "TestMain should call os.Exit to set exit code")
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func isTestMain(j *lint.Job, node ast.Node) bool {
+ decl, ok := node.(*ast.FuncDecl)
+ if !ok {
+ return false
+ }
+ if decl.Name.Name != "TestMain" {
+ return false
+ }
+ if len(decl.Type.Params.List) != 1 {
+ return false
+ }
+ arg := decl.Type.Params.List[0]
+ if len(arg.Names) != 1 {
+ return false
+ }
+ typ := j.Program.Info.TypeOf(arg.Type)
+ return typ != nil && typ.String() == "*testing.M"
+}
+
+func (c *Checker) CheckExec(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if !j.IsCallToAST(call, "os/exec.Command") {
+ return true
+ }
+ val, ok := j.ExprToString(call.Args[0])
+ if !ok {
+ return true
+ }
+ if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") {
+ return true
+ }
+ j.Errorf(call.Args[0], "first argument to exec.Command looks like a shell command, but a program name or path are expected")
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ loop, ok := node.(*ast.ForStmt)
+ if !ok || len(loop.Body.List) != 1 || loop.Cond != nil || loop.Init != nil {
+ return true
+ }
+ sel, ok := loop.Body.List[0].(*ast.SelectStmt)
+ if !ok {
+ return true
+ }
+ for _, c := range sel.Body.List {
+ if comm, ok := c.(*ast.CommClause); ok && comm.Comm == nil && len(comm.Body) == 0 {
+ j.Errorf(comm, "should not have an empty default case in a for+select loop. The loop will spin.")
+ }
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ op, ok := node.(*ast.BinaryExpr)
+ if !ok {
+ return true
+ }
+ switch op.Op {
+ case token.EQL, token.NEQ:
+ if basic, ok := j.Program.Info.TypeOf(op.X).(*types.Basic); ok {
+ if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 {
+ // f == f and f != f might be used to check for NaN
+ return true
+ }
+ }
+ case token.SUB, token.QUO, token.AND, token.REM, token.OR, token.XOR, token.AND_NOT,
+ token.LAND, token.LOR, token.LSS, token.GTR, token.LEQ, token.GEQ:
+ default:
+ // For some ops, such as + and *, it can make sense to
+ // have identical operands
+ return true
+ }
+
+ if j.Render(op.X) != j.Render(op.Y) {
+ return true
+ }
+ j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op)
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckScopedBreak(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ var body *ast.BlockStmt
+ switch node := node.(type) {
+ case *ast.ForStmt:
+ body = node.Body
+ case *ast.RangeStmt:
+ body = node.Body
+ default:
+ return true
+ }
+ for _, stmt := range body.List {
+ var blocks [][]ast.Stmt
+ switch stmt := stmt.(type) {
+ case *ast.SwitchStmt:
+ for _, c := range stmt.Body.List {
+ blocks = append(blocks, c.(*ast.CaseClause).Body)
+ }
+ case *ast.SelectStmt:
+ for _, c := range stmt.Body.List {
+ blocks = append(blocks, c.(*ast.CommClause).Body)
+ }
+ default:
+ continue
+ }
+
+ for _, body := range blocks {
+ if len(body) == 0 {
+ continue
+ }
+ lasts := []ast.Stmt{body[len(body)-1]}
+ // TODO(dh): unfold all levels of nested block
+ // statements, not just a single level if statement
+ if ifs, ok := lasts[0].(*ast.IfStmt); ok {
+ if len(ifs.Body.List) == 0 {
+ continue
+ }
+ lasts[0] = ifs.Body.List[len(ifs.Body.List)-1]
+
+ if block, ok := ifs.Else.(*ast.BlockStmt); ok {
+ if len(block.List) != 0 {
+ lasts = append(lasts, block.List[len(block.List)-1])
+ }
+ }
+ }
+ for _, last := range lasts {
+ branch, ok := last.(*ast.BranchStmt)
+ if !ok || branch.Tok != token.BREAK || branch.Label != nil {
+ continue
+ }
+ j.Errorf(branch, "ineffective break statement. Did you mean to break out of the outer loop?")
+ }
+ }
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckUnsafePrintf(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if !j.IsCallToAnyAST(call, "fmt.Printf", "fmt.Sprintf", "log.Printf") {
+ return true
+ }
+ if len(call.Args) != 1 {
+ return true
+ }
+ switch call.Args[0].(type) {
+ case *ast.CallExpr, *ast.Ident:
+ default:
+ return true
+ }
+ j.Errorf(call.Args[0], "printf-style function with dynamic first argument and no further arguments should use print-style function instead")
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckEarlyDefer(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ block, ok := node.(*ast.BlockStmt)
+ if !ok {
+ return true
+ }
+ if len(block.List) < 2 {
+ return true
+ }
+ for i, stmt := range block.List {
+ if i == len(block.List)-1 {
+ break
+ }
+ assign, ok := stmt.(*ast.AssignStmt)
+ if !ok {
+ continue
+ }
+ if len(assign.Rhs) != 1 {
+ continue
+ }
+ if len(assign.Lhs) < 2 {
+ continue
+ }
+ if lhs, ok := assign.Lhs[len(assign.Lhs)-1].(*ast.Ident); ok && lhs.Name == "_" {
+ continue
+ }
+ call, ok := assign.Rhs[0].(*ast.CallExpr)
+ if !ok {
+ continue
+ }
+ sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature)
+ if !ok {
+ continue
+ }
+ if sig.Results().Len() < 2 {
+ continue
+ }
+ last := sig.Results().At(sig.Results().Len() - 1)
+ // FIXME(dh): check that it's error from universe, not
+ // another type of the same name
+ if last.Type().String() != "error" {
+ continue
+ }
+ lhs, ok := assign.Lhs[0].(*ast.Ident)
+ if !ok {
+ continue
+ }
+ def, ok := block.List[i+1].(*ast.DeferStmt)
+ if !ok {
+ continue
+ }
+ sel, ok := def.Call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ continue
+ }
+ ident, ok := selectorX(sel).(*ast.Ident)
+ if !ok {
+ continue
+ }
+ if ident.Obj != lhs.Obj {
+ continue
+ }
+ if sel.Sel.Name != "Close" {
+ continue
+ }
+ j.Errorf(def, "should check returned error before deferring %s", j.Render(def.Call))
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func selectorX(sel *ast.SelectorExpr) ast.Node {
+ switch x := sel.X.(type) {
+ case *ast.SelectorExpr:
+ return selectorX(x)
+ default:
+ return x
+ }
+}
+
+func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) {
+ // Initially it might seem like this check would be easier to
+ // implement in SSA. After all, we're only checking for two
+ // consecutive method calls. In reality, however, there may be any
+ // number of other instructions between the lock and unlock, while
+ // still constituting an empty critical section. For example,
+ // given `m.x().Lock(); m.x().Unlock()`, there will be a call to
+ // x(). In the AST-based approach, this has a tiny potential for a
+ // false positive (the second call to x might be doing work that
+ // is protected by the mutex). In an SSA-based approach, however,
+ // it would miss a lot of real bugs.
+
+ mutexParams := func(s ast.Stmt) (x ast.Expr, funcName string, ok bool) {
+ expr, ok := s.(*ast.ExprStmt)
+ if !ok {
+ return nil, "", false
+ }
+ call, ok := expr.X.(*ast.CallExpr)
+ if !ok {
+ return nil, "", false
+ }
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return nil, "", false
+ }
+
+ fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func)
+ if !ok {
+ return nil, "", false
+ }
+ sig := fn.Type().(*types.Signature)
+ if sig.Params().Len() != 0 || sig.Results().Len() != 0 {
+ return nil, "", false
+ }
+
+ return sel.X, fn.Name(), true
+ }
+
+ fn := func(node ast.Node) bool {
+ block, ok := node.(*ast.BlockStmt)
+ if !ok {
+ return true
+ }
+ if len(block.List) < 2 {
+ return true
+ }
+ for i := range block.List[:len(block.List)-1] {
+ sel1, method1, ok1 := mutexParams(block.List[i])
+ sel2, method2, ok2 := mutexParams(block.List[i+1])
+
+ if !ok1 || !ok2 || j.Render(sel1) != j.Render(sel2) {
+ continue
+ }
+ if (method1 == "Lock" && method2 == "Unlock") ||
+ (method1 == "RLock" && method2 == "RUnlock") {
+ j.Errorf(block.List[i+1], "empty critical section")
+ }
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+// cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't
+// want to flag.
+var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`)
+
+func (c *Checker) CheckIneffectiveCopy(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ if unary, ok := node.(*ast.UnaryExpr); ok {
+ if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND {
+ ident, ok := star.X.(*ast.Ident)
+ if !ok || !cgoIdent.MatchString(ident.Name) {
+ j.Errorf(unary, "&*x will be simplified to x. It will not copy x.")
+ }
+ }
+ }
+
+ if star, ok := node.(*ast.StarExpr); ok {
+ if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND {
+ j.Errorf(star, "*&x will be simplified to x. It will not copy x.")
+ }
+ }
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckDiffSizeComparison(j *lint.Job) {
+ for _, ssafn := range j.Program.InitialFunctions {
+ for _, b := range ssafn.Blocks {
+ for _, ins := range b.Instrs {
+ binop, ok := ins.(*ssa.BinOp)
+ if !ok {
+ continue
+ }
+ if binop.Op != token.EQL && binop.Op != token.NEQ {
+ continue
+ }
+ _, ok1 := binop.X.(*ssa.Slice)
+ _, ok2 := binop.Y.(*ssa.Slice)
+ if !ok1 && !ok2 {
+ continue
+ }
+ r := c.funcDescs.Get(ssafn).Ranges
+ r1, ok1 := r.Get(binop.X).(vrp.StringInterval)
+ r2, ok2 := r.Get(binop.Y).(vrp.StringInterval)
+ if !ok1 || !ok2 {
+ continue
+ }
+ if r1.Length.Intersection(r2.Length).Empty() {
+ j.Errorf(binop, "comparing strings of different sizes for equality will always return false")
+ }
+ }
+ }
+ }
+}
+
+func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ assign, ok := node.(*ast.AssignStmt)
+ if ok {
+ // TODO(dh): This risks missing some Header reads, for
+ // example in `h1["foo"] = h2["foo"]` – these edge
+ // cases are probably rare enough to ignore for now.
+ for _, expr := range assign.Lhs {
+ op, ok := expr.(*ast.IndexExpr)
+ if !ok {
+ continue
+ }
+ if hasType(j, op.X, "net/http.Header") {
+ return false
+ }
+ }
+ return true
+ }
+ op, ok := node.(*ast.IndexExpr)
+ if !ok {
+ return true
+ }
+ if !hasType(j, op.X, "net/http.Header") {
+ return true
+ }
+ s, ok := j.ExprToString(op.Index)
+ if !ok {
+ return true
+ }
+ if s == http.CanonicalHeaderKey(s) {
+ return true
+ }
+ j.Errorf(op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s)
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+func (c *Checker) CheckBenchmarkN(j *lint.Job) {
+ fn := func(node ast.Node) bool {
+ assign, ok := node.(*ast.AssignStmt)
+ if !ok {
+ return true
+ }
+ if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 {
+ return true
+ }
+ sel, ok := assign.Lhs[0].(*ast.SelectorExpr)
+ if !ok {
+ return true
+ }
+ if sel.Sel.Name != "N" {
+ return true
+ }
+ if !hasType(j, sel.X, "*testing.B") {
+ return true
+ }
+ j.Errorf(assign, "should not assign to %s", j.Render(sel))
+ return true
+ }
+ for _, f := range j.Program.Files {
+ ast.Inspect(f, fn)
+ }
+}
+
+// CheckUnreadVariableValues flags assigned values that, according to
+// their SSA referrers, are never read before being overwritten or
+// going out of scope. Assignments to the blank identifier and example
+// functions are ignored.
+func (c *Checker) CheckUnreadVariableValues(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		switch node.(type) {
+		case *ast.FuncDecl, *ast.FuncLit:
+		default:
+			return true
+		}
+
+		ssafn := c.nodeFns[node]
+		if ssafn == nil {
+			return true
+		}
+		if lint.IsExample(ssafn) {
+			return true
+		}
+		ast.Inspect(node, func(node ast.Node) bool {
+			assign, ok := node.(*ast.AssignStmt)
+			if !ok {
+				return true
+			}
+			if len(assign.Lhs) > 1 && len(assign.Rhs) == 1 {
+				// Either a function call with multiple return values,
+				// or a comma-ok assignment
+
+				val, _ := ssafn.ValueForExpr(assign.Rhs[0])
+				if val == nil {
+					return true
+				}
+				refs := val.Referrers()
+				if refs == nil {
+					return true
+				}
+				// Each result is consumed through an ssa.Extract;
+				// a result whose Extract has no (non-debug)
+				// referrers is never used.
+				for _, ref := range *refs {
+					ex, ok := ref.(*ssa.Extract)
+					if !ok {
+						continue
+					}
+					exrefs := ex.Referrers()
+					if exrefs == nil {
+						continue
+					}
+					if len(lint.FilterDebug(*exrefs)) == 0 {
+						lhs := assign.Lhs[ex.Index]
+						if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" {
+							continue
+						}
+						j.Errorf(lhs, "this value of %s is never used", lhs)
+					}
+				}
+				return true
+			}
+			// One-to-one assignments: check each RHS value's
+			// referrers directly.
+			for i, lhs := range assign.Lhs {
+				rhs := assign.Rhs[i]
+				if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" {
+					continue
+				}
+				val, _ := ssafn.ValueForExpr(rhs)
+				if val == nil {
+					continue
+				}
+
+				refs := val.Referrers()
+				if refs == nil {
+					// TODO investigate why refs can be nil
+					return true
+				}
+				if len(lint.FilterDebug(*refs)) == 0 {
+					j.Errorf(lhs, "this value of %s is never used", lhs)
+				}
+			}
+			return true
+		})
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// CheckPredeterminedBooleanExprs flags comparisons whose operands can
+// only take constant values (as determined by consts), and which
+// therefore evaluate to the same boolean for every possible value.
+func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		for _, block := range ssafn.Blocks {
+			for _, ins := range block.Instrs {
+				ssabinop, ok := ins.(*ssa.BinOp)
+				if !ok {
+					continue
+				}
+				switch ssabinop.Op {
+				case token.GTR, token.LSS, token.EQL, token.NEQ, token.LEQ, token.GEQ:
+				default:
+					continue
+				}
+
+				xs, ok1 := consts(ssabinop.X, nil, nil)
+				ys, ok2 := consts(ssabinop.Y, nil, nil)
+				if !ok1 || !ok2 || len(xs) == 0 || len(ys) == 0 {
+					continue
+				}
+
+				// Count how many of the |xs|*|ys| combinations
+				// make the comparison true; a nil Value represents
+				// a nil constant and only equals another nil.
+				trues := 0
+				for _, x := range xs {
+					for _, y := range ys {
+						if x.Value == nil {
+							if y.Value == nil {
+								trues++
+							}
+							continue
+						}
+						if constant.Compare(x.Value, ssabinop.Op, y.Value) {
+							trues++
+						}
+					}
+				}
+				// Flag only if the result is the same for all
+				// combinations (all true or all false).
+				b := trues != 0
+				if trues == 0 || trues == len(xs)*len(ys) {
+					j.Errorf(ssabinop, "binary expression is always %t for all possible values (%s %s %s)",
+						b, xs, ssabinop.Op, ys)
+				}
+			}
+		}
+	}
+}
+
+// CheckNilMaps flags map updates whose map operand is the nil
+// constant; assigning to a nil map panics at run time.
+func (c *Checker) CheckNilMaps(j *lint.Job) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		for _, block := range ssafn.Blocks {
+			for _, ins := range block.Instrs {
+				mu, ok := ins.(*ssa.MapUpdate)
+				if !ok {
+					continue
+				}
+				c, ok := mu.Map.(*ssa.Const)
+				if !ok {
+					continue
+				}
+				// A *ssa.Const with nil Value is the nil constant.
+				if c.Value != nil {
+					continue
+				}
+				j.Errorf(mu, "assignment to nil map")
+			}
+		}
+	}
+}
+
+// CheckUnsignedComparison flags tautological comparisons of an
+// unsigned value against the literal 0: `x >= 0` is always true and
+// `x < 0` is always false for unsigned x.
+func (c *Checker) CheckUnsignedComparison(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		expr, ok := node.(*ast.BinaryExpr)
+		if !ok {
+			return true
+		}
+		tx := j.Program.Info.TypeOf(expr.X)
+		basic, ok := tx.Underlying().(*types.Basic)
+		if !ok {
+			return true
+		}
+		if (basic.Info() & types.IsUnsigned) == 0 {
+			return true
+		}
+		// Only the exact literal "0" on the right-hand side is
+		// considered; `0 <= x` is not matched.
+		lit, ok := expr.Y.(*ast.BasicLit)
+		if !ok || lit.Value != "0" {
+			return true
+		}
+		switch expr.Op {
+		case token.GEQ:
+			j.Errorf(expr, "unsigned values are always >= 0")
+		case token.LSS:
+			j.Errorf(expr, "unsigned values are never < 0")
+		}
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// consts collects the set of constant values an SSA value may take,
+// following Phi nodes (cycle-protected via visitedPhis, keyed by the
+// phi's name) and Convert instructions. It reports false if the value
+// can be anything other than a constant. The result is appended to
+// out and deduplicated.
+//
+// NOTE(review): the dedup loop only compares each element to the
+// previously kept one, so it only collapses *adjacent* equal values —
+// presumably sufficient for how callers use the result; confirm.
+func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) {
+	if visitedPhis == nil {
+		visitedPhis = map[string]bool{}
+	}
+	var ok bool
+	switch val := val.(type) {
+	case *ssa.Phi:
+		if visitedPhis[val.Name()] {
+			break
+		}
+		visitedPhis[val.Name()] = true
+		vals := val.Operands(nil)
+		for _, phival := range vals {
+			out, ok = consts(*phival, out, visitedPhis)
+			if !ok {
+				return nil, false
+			}
+		}
+	case *ssa.Const:
+		out = append(out, val)
+	case *ssa.Convert:
+		out, ok = consts(val.X, out, visitedPhis)
+		if !ok {
+			return nil, false
+		}
+	default:
+		return nil, false
+	}
+	if len(out) < 2 {
+		return out, true
+	}
+	uniq := []*ssa.Const{out[0]}
+	for _, val := range out[1:] {
+		if val.Value == uniq[len(uniq)-1].Value {
+			continue
+		}
+		uniq = append(uniq, val)
+	}
+	return uniq, true
+}
+
+// CheckLoopCondition flags for loops of the shape
+// `for x := ...; x OP ...; y++ {}` in which the condition variable x
+// is, per SSA, never modified inside the loop — i.e. the loop either
+// never runs or never terminates by its condition.
+func (c *Checker) CheckLoopCondition(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		loop, ok := node.(*ast.ForStmt)
+		if !ok {
+			return true
+		}
+		if loop.Init == nil || loop.Cond == nil || loop.Post == nil {
+			return true
+		}
+		init, ok := loop.Init.(*ast.AssignStmt)
+		if !ok || len(init.Lhs) != 1 || len(init.Rhs) != 1 {
+			return true
+		}
+		cond, ok := loop.Cond.(*ast.BinaryExpr)
+		if !ok {
+			return true
+		}
+		x, ok := cond.X.(*ast.Ident)
+		if !ok {
+			return true
+		}
+		lhs, ok := init.Lhs[0].(*ast.Ident)
+		if !ok {
+			return true
+		}
+		// The condition must test the variable declared in Init.
+		if x.Obj != lhs.Obj {
+			return true
+		}
+		if _, ok := loop.Post.(*ast.IncDecStmt); !ok {
+			return true
+		}
+
+		ssafn := c.nodeFns[cond]
+		if ssafn == nil {
+			return true
+		}
+		v, isAddr := ssafn.ValueForExpr(cond.X)
+		if v == nil || isAddr {
+			return true
+		}
+		switch v := v.(type) {
+		case *ssa.Phi:
+			// A phi merging the initial constant with a sigma of
+			// itself means no other definition reaches the loop —
+			// the variable never changes.
+			ops := v.Operands(nil)
+			if len(ops) != 2 {
+				return true
+			}
+			_, ok := (*ops[0]).(*ssa.Const)
+			if !ok {
+				return true
+			}
+			sigma, ok := (*ops[1]).(*ssa.Sigma)
+			if !ok {
+				return true
+			}
+			if sigma.X != v {
+				return true
+			}
+		case *ssa.UnOp:
+			// Loaded through a pointer; may change elsewhere.
+			return true
+		}
+		j.Errorf(cond, "variable in loop condition never changes")
+
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// CheckArgOverwritten flags function parameters that are assigned to
+// in the body before their incoming value is ever read (the SSA
+// parameter has no non-debug referrers, yet an AST assignment to the
+// parameter exists).
+func (c *Checker) CheckArgOverwritten(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		var typ *ast.FuncType
+		var body *ast.BlockStmt
+		switch fn := node.(type) {
+		case *ast.FuncDecl:
+			typ = fn.Type
+			body = fn.Body
+		case *ast.FuncLit:
+			typ = fn.Type
+			body = fn.Body
+		}
+		if body == nil {
+			return true
+		}
+		ssafn := c.nodeFns[node]
+		if ssafn == nil {
+			return true
+		}
+		if len(typ.Params.List) == 0 {
+			return true
+		}
+		for _, field := range typ.Params.List {
+			for _, arg := range field.Names {
+				// Map the AST parameter to its SSA counterpart via
+				// the shared types.Object.
+				obj := j.Program.Info.ObjectOf(arg)
+				var ssaobj *ssa.Parameter
+				for _, param := range ssafn.Params {
+					if param.Object() == obj {
+						ssaobj = param
+						break
+					}
+				}
+				if ssaobj == nil {
+					continue
+				}
+				refs := ssaobj.Referrers()
+				if refs == nil {
+					continue
+				}
+				// Any non-debug referrer means the incoming value
+				// is read somewhere.
+				if len(lint.FilterDebug(*refs)) != 0 {
+					continue
+				}
+
+				assigned := false
+				ast.Inspect(body, func(node ast.Node) bool {
+					assign, ok := node.(*ast.AssignStmt)
+					if !ok {
+						return true
+					}
+					for _, lhs := range assign.Lhs {
+						ident, ok := lhs.(*ast.Ident)
+						if !ok {
+							continue
+						}
+						if j.Program.Info.ObjectOf(ident) == obj {
+							assigned = true
+							return false
+						}
+					}
+					return true
+				})
+				if assigned {
+					j.Errorf(arg, "argument %s is overwritten before first use", arg)
+				}
+			}
+		}
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// CheckIneffectiveLoop flags loops whose body unconditionally exits
+// (via break or return) on every iteration, making the loop
+// effectively run at most once despite containing branching logic.
+func (c *Checker) CheckIneffectiveLoop(j *lint.Job) {
+	// This check detects some, but not all unconditional loop exits.
+	// We give up in the following cases:
+	//
+	// - a goto anywhere in the loop. The goto might skip over our
+	//   return, and we don't check that it doesn't.
+	//
+	// - any nested, unlabelled continue, even if it is in another
+	//   loop or closure.
+	fn := func(node ast.Node) bool {
+		var body *ast.BlockStmt
+		switch fn := node.(type) {
+		case *ast.FuncDecl:
+			body = fn.Body
+		case *ast.FuncLit:
+			body = fn.Body
+		default:
+			return true
+		}
+		if body == nil {
+			return true
+		}
+		// Resolve labels first so labelled break/continue can be
+		// matched to their target statement below.
+		labels := map[*ast.Object]ast.Stmt{}
+		ast.Inspect(body, func(node ast.Node) bool {
+			label, ok := node.(*ast.LabeledStmt)
+			if !ok {
+				return true
+			}
+			labels[label.Label.Obj] = label.Stmt
+			return true
+		})
+
+		ast.Inspect(body, func(node ast.Node) bool {
+			var loop ast.Node
+			var body *ast.BlockStmt
+			switch node := node.(type) {
+			case *ast.ForStmt:
+				body = node.Body
+				loop = node
+			case *ast.RangeStmt:
+				typ := j.Program.Info.TypeOf(node.X)
+				if _, ok := typ.Underlying().(*types.Map); ok {
+					// looping once over a map is a valid pattern for
+					// getting an arbitrary element.
+					return true
+				}
+				body = node.Body
+				loop = node
+			default:
+				return true
+			}
+			if len(body.List) < 2 {
+				// avoid flagging the somewhat common pattern of using
+				// a range loop to get the first element in a slice,
+				// or the first rune in a string.
+				return true
+			}
+			// Scan the top-level statements for an unconditional
+			// exit; a continue targeting this loop cancels it.
+			var unconditionalExit ast.Node
+			hasBranching := false
+			for _, stmt := range body.List {
+				switch stmt := stmt.(type) {
+				case *ast.BranchStmt:
+					switch stmt.Tok {
+					case token.BREAK:
+						if stmt.Label == nil || labels[stmt.Label.Obj] == loop {
+							unconditionalExit = stmt
+						}
+					case token.CONTINUE:
+						if stmt.Label == nil || labels[stmt.Label.Obj] == loop {
+							unconditionalExit = nil
+							return false
+						}
+					}
+				case *ast.ReturnStmt:
+					unconditionalExit = stmt
+				case *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.SwitchStmt, *ast.SelectStmt:
+					hasBranching = true
+				}
+			}
+			// Without any branching the "loop runs once" may well be
+			// intentional, so only flag loops that also branch.
+			if unconditionalExit == nil || !hasBranching {
+				return false
+			}
+			// Give up on goto, and on any continue that targets this
+			// loop from a nested position (see comment at the top).
+			ast.Inspect(body, func(node ast.Node) bool {
+				if branch, ok := node.(*ast.BranchStmt); ok {
+
+					switch branch.Tok {
+					case token.GOTO:
+						unconditionalExit = nil
+						return false
+					case token.CONTINUE:
+						if branch.Label != nil && labels[branch.Label.Obj] != loop {
+							return true
+						}
+						unconditionalExit = nil
+						return false
+					}
+				}
+				return true
+			})
+			if unconditionalExit != nil {
+				j.Errorf(unconditionalExit, "the surrounding loop is unconditionally terminated")
+			}
+			return true
+		})
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// CheckNilContext flags calls that pass untyped nil as the first
+// argument to a function whose first parameter is a context.Context;
+// context.TODO should be used instead.
+func (c *Checker) CheckNilContext(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		call, ok := node.(*ast.CallExpr)
+		if !ok {
+			return true
+		}
+		if len(call.Args) == 0 {
+			return true
+		}
+		// Untyped nil is the only literal nil we can detect here.
+		if typ, ok := j.Program.Info.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil {
+			return true
+		}
+		sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature)
+		if !ok {
+			return true
+		}
+		if sig.Params().Len() == 0 {
+			return true
+		}
+		if types.TypeString(sig.Params().At(0).Type(), nil) != "context.Context" {
+			return true
+		}
+		j.Errorf(call.Args[0],
+			"do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use")
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// CheckSeeker flags Seek calls where an io.Seek* whence constant is
+// passed as the first argument; io.Seeker takes (offset, whence), so
+// the constant belongs in the second position.
+func (c *Checker) CheckSeeker(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		call, ok := node.(*ast.CallExpr)
+		if !ok {
+			return true
+		}
+		sel, ok := call.Fun.(*ast.SelectorExpr)
+		if !ok {
+			return true
+		}
+		if sel.Sel.Name != "Seek" {
+			return true
+		}
+		if len(call.Args) != 2 {
+			return true
+		}
+		// Match the textual pattern io.Seek{Start,Current,End} in
+		// argument position 0.
+		arg0, ok := call.Args[0].(*ast.SelectorExpr)
+		if !ok {
+			return true
+		}
+		switch arg0.Sel.Name {
+		case "SeekStart", "SeekCurrent", "SeekEnd":
+		default:
+			return true
+		}
+		pkg, ok := arg0.X.(*ast.Ident)
+		if !ok {
+			return true
+		}
+		if pkg.Name != "io" {
+			return true
+		}
+		j.Errorf(call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead")
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// CheckIneffectiveAppend flags append calls whose result is never
+// used except possibly as input to further appends — i.e. the
+// appended data can never be observed.
+func (c *Checker) CheckIneffectiveAppend(j *lint.Job) {
+	// isAppend reports whether ins is a direct call to the append
+	// builtin.
+	isAppend := func(ins ssa.Value) bool {
+		call, ok := ins.(*ssa.Call)
+		if !ok {
+			return false
+		}
+		if call.Call.IsInvoke() {
+			return false
+		}
+		if builtin, ok := call.Call.Value.(*ssa.Builtin); !ok || builtin.Name() != "append" {
+			return false
+		}
+		return true
+	}
+
+	for _, ssafn := range j.Program.InitialFunctions {
+		for _, block := range ssafn.Blocks {
+			for _, ins := range block.Instrs {
+				val, ok := ins.(ssa.Value)
+				if !ok || !isAppend(val) {
+					continue
+				}
+
+				// Walk the referrer graph transitively, looking
+				// through phis, sigmas and further appends; any
+				// other use marks the result as used. The visited
+				// set guards against cycles.
+				isUsed := false
+				visited := map[ssa.Instruction]bool{}
+				var walkRefs func(refs []ssa.Instruction)
+				walkRefs = func(refs []ssa.Instruction) {
+				loop:
+					for _, ref := range refs {
+						if visited[ref] {
+							continue
+						}
+						visited[ref] = true
+						if _, ok := ref.(*ssa.DebugRef); ok {
+							continue
+						}
+						switch ref := ref.(type) {
+						case *ssa.Phi:
+							walkRefs(*ref.Referrers())
+						case *ssa.Sigma:
+							walkRefs(*ref.Referrers())
+						case ssa.Value:
+							if !isAppend(ref) {
+								isUsed = true
+							} else {
+								walkRefs(*ref.Referrers())
+							}
+						case ssa.Instruction:
+							isUsed = true
+							break loop
+						}
+					}
+				}
+				refs := val.Referrers()
+				if refs == nil {
+					continue
+				}
+				walkRefs(*refs)
+				if !isUsed {
+					j.Errorf(ins, "this result of append is never used, except maybe in other appends")
+				}
+			}
+		}
+	}
+}
+
+// CheckConcurrentTesting flags goroutines started with `go` that call
+// testing methods such as t.FailNow or t.Fatal (methods on
+// *testing.common), which must only be called from the goroutine
+// running the test.
+func (c *Checker) CheckConcurrentTesting(j *lint.Job) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		for _, block := range ssafn.Blocks {
+			for _, ins := range block.Instrs {
+				gostmt, ok := ins.(*ssa.Go)
+				if !ok {
+					continue
+				}
+				// Resolve the goroutine's function; dynamic calls
+				// are skipped.
+				var fn *ssa.Function
+				switch val := gostmt.Call.Value.(type) {
+				case *ssa.Function:
+					fn = val
+				case *ssa.MakeClosure:
+					fn = val.Fn.(*ssa.Function)
+				default:
+					continue
+				}
+				if fn.Blocks == nil {
+					continue
+				}
+				for _, block := range fn.Blocks {
+					for _, ins := range block.Instrs {
+						call, ok := ins.(*ssa.Call)
+						if !ok {
+							continue
+						}
+						if call.Call.IsInvoke() {
+							continue
+						}
+						callee := call.Call.StaticCallee()
+						if callee == nil {
+							continue
+						}
+						recv := callee.Signature.Recv()
+						if recv == nil {
+							continue
+						}
+						// T.Fatal etc. are methods on the embedded
+						// *testing.common.
+						if types.TypeString(recv.Type(), nil) != "*testing.common" {
+							continue
+						}
+						fn, ok := call.Call.StaticCallee().Object().(*types.Func)
+						if !ok {
+							continue
+						}
+						name := fn.Name()
+						switch name {
+						case "FailNow", "Fatal", "Fatalf", "SkipNow", "Skip", "Skipf":
+						default:
+							continue
+						}
+						j.Errorf(gostmt, "the goroutine calls T.%s, which must be called in the same goroutine as the test", name)
+					}
+				}
+			}
+		}
+	}
+}
+
+// CheckCyclicFinalizer flags runtime.SetFinalizer calls where the
+// finalizer closure captures the very object it is set on, keeping
+// the object reachable so the finalizer can never run.
+func (c *Checker) CheckCyclicFinalizer(j *lint.Job) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		node := c.funcDescs.CallGraph.CreateNode(ssafn)
+		for _, edge := range node.Out {
+			if edge.Callee.Func.RelString(nil) != "runtime.SetFinalizer" {
+				continue
+			}
+			// Unwrap the interface conversions SetFinalizer's
+			// interface{} parameters introduce.
+			arg0 := edge.Site.Common().Args[0]
+			if iface, ok := arg0.(*ssa.MakeInterface); ok {
+				arg0 = iface.X
+			}
+			unop, ok := arg0.(*ssa.UnOp)
+			if !ok {
+				continue
+			}
+			v, ok := unop.X.(*ssa.Alloc)
+			if !ok {
+				continue
+			}
+			arg1 := edge.Site.Common().Args[1]
+			if iface, ok := arg1.(*ssa.MakeInterface); ok {
+				arg1 = iface.X
+			}
+			mc, ok := arg1.(*ssa.MakeClosure)
+			if !ok {
+				continue
+			}
+			// The closure's bindings are its captured variables.
+			for _, b := range mc.Bindings {
+				if b == v {
+					pos := j.Program.DisplayPosition(mc.Fn.Pos())
+					j.Errorf(edge.Site, "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos)
+				}
+			}
+		}
+	}
+}
+
+// CheckSliceOutOfBounds flags slice index operations where value
+// range propagation (VRP) proves the index's lower bound is at least
+// the slice's maximum possible length.
+func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		for _, block := range ssafn.Blocks {
+			for _, ins := range block.Instrs {
+				ia, ok := ins.(*ssa.IndexAddr)
+				if !ok {
+					continue
+				}
+				if _, ok := ia.X.Type().Underlying().(*types.Slice); !ok {
+					continue
+				}
+				sr, ok1 := c.funcDescs.Get(ssafn).Ranges[ia.X].(vrp.SliceInterval)
+				idxr, ok2 := c.funcDescs.Get(ssafn).Ranges[ia.Index].(vrp.IntInterval)
+				if !ok1 || !ok2 || !sr.IsKnown() || !idxr.IsKnown() || sr.Length.Empty() || idxr.Empty() {
+					continue
+				}
+				// index lower bound >= length upper bound ⇒ the
+				// access is out of bounds on every path.
+				if idxr.Lower.Cmp(sr.Length.Upper) >= 0 {
+					j.Errorf(ia, "index out of bounds")
+				}
+			}
+		}
+	}
+}
+
+// CheckDeferLock flags the pattern `mu.Lock(); defer mu.Lock()`
+// (or RLock) on the same mutex — almost certainly a typo for
+// deferring Unlock/RUnlock.
+func (c *Checker) CheckDeferLock(j *lint.Job) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		for _, block := range ssafn.Blocks {
+			instrs := lint.FilterDebug(block.Instrs)
+			if len(instrs) < 2 {
+				continue
+			}
+			// Look at consecutive instruction pairs: a Lock/RLock
+			// call immediately followed by a deferred Lock/RLock
+			// on the same receiver.
+			for i, ins := range instrs[:len(instrs)-1] {
+				call, ok := ins.(*ssa.Call)
+				if !ok {
+					continue
+				}
+				if !lint.IsCallTo(call.Common(), "(*sync.Mutex).Lock") && !lint.IsCallTo(call.Common(), "(*sync.RWMutex).RLock") {
+					continue
+				}
+				nins, ok := instrs[i+1].(*ssa.Defer)
+				if !ok {
+					continue
+				}
+				if !lint.IsCallTo(&nins.Call, "(*sync.Mutex).Lock") && !lint.IsCallTo(&nins.Call, "(*sync.RWMutex).RLock") {
+					continue
+				}
+				if call.Common().Args[0] != nins.Call.Args[0] {
+					continue
+				}
+				name := shortCallName(call.Common())
+				alt := ""
+				switch name {
+				case "Lock":
+					alt = "Unlock"
+				case "RLock":
+					alt = "RUnlock"
+				}
+				j.Errorf(nins, "deferring %s right after having locked already; did you mean to defer %s?", name, alt)
+			}
+		}
+	}
+}
+
+// CheckNaNComparison flags binary expressions that compare against
+// the result of math.NaN(); no value compares equal to NaN, so such
+// comparisons are meaningless.
+func (c *Checker) CheckNaNComparison(j *lint.Job) {
+	// isNaN reports whether v is a direct call to math.NaN.
+	isNaN := func(v ssa.Value) bool {
+		call, ok := v.(*ssa.Call)
+		if !ok {
+			return false
+		}
+		return lint.IsCallTo(call.Common(), "math.NaN")
+	}
+	for _, ssafn := range j.Program.InitialFunctions {
+		for _, block := range ssafn.Blocks {
+			for _, ins := range block.Instrs {
+				ins, ok := ins.(*ssa.BinOp)
+				if !ok {
+					continue
+				}
+				if isNaN(ins.X) || isNaN(ins.Y) {
+					j.Errorf(ins, "no value is equal to NaN, not even NaN itself")
+				}
+			}
+		}
+	}
+}
+
+// CheckInfiniteRecursion flags self-recursive calls from which no
+// return is reachable: the call site's block dominates every block
+// containing a Return instruction.
+func (c *Checker) CheckInfiniteRecursion(j *lint.Job) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		node := c.funcDescs.CallGraph.CreateNode(ssafn)
+		for _, edge := range node.Out {
+			// Only self-edges (the function calling itself).
+			if edge.Callee != node {
+				continue
+			}
+			if _, ok := edge.Site.(*ssa.Go); ok {
+				// Recursively spawning goroutines doesn't consume
+				// stack space infinitely, so don't flag it.
+				continue
+			}
+
+			// If some block not dominated by the recursive call
+			// ends in a Return, the function can terminate.
+			block := edge.Site.Block()
+			canReturn := false
+			for _, b := range ssafn.Blocks {
+				if block.Dominates(b) {
+					continue
+				}
+				if len(b.Instrs) == 0 {
+					continue
+				}
+				if _, ok := b.Instrs[len(b.Instrs)-1].(*ssa.Return); ok {
+					canReturn = true
+					break
+				}
+			}
+			if canReturn {
+				continue
+			}
+			j.Errorf(edge.Site, "infinite recursive call")
+		}
+	}
+}
+
+// objectName returns a package-path-qualified name for obj
+// ("path/to/pkg.Name") when obj is a package-scope object, the bare
+// name otherwise, or "<nil>" for a nil object.
+func objectName(obj types.Object) string {
+	if obj == nil {
+		return "<nil>"
+	}
+	var name string
+	// Qualify only objects that live directly in their package's
+	// scope (i.e. not locals, fields or methods).
+	if obj.Pkg() != nil && obj.Pkg().Scope().Lookup(obj.Name()) == obj {
+		var s string
+		s = obj.Pkg().Path()
+		if s != "" {
+			name += s + "."
+		}
+	}
+	name += obj.Name()
+	return name
+}
+
+// isName reports whether expr (an identifier or selector) resolves to
+// the object whose qualified name (per objectName) is name.
+func isName(j *lint.Job, expr ast.Expr, name string) bool {
+	var obj types.Object
+	switch expr := expr.(type) {
+	case *ast.Ident:
+		obj = j.Program.Info.ObjectOf(expr)
+	case *ast.SelectorExpr:
+		obj = j.Program.Info.ObjectOf(expr.Sel)
+	}
+	return objectName(obj) == name
+}
+
+// CheckLeakyTimeTick flags time.Tick calls outside of package main,
+// tests and functions known to run forever; time.Tick leaks its
+// underlying ticker, so time.NewTicker should be used instead.
+func (c *Checker) CheckLeakyTimeTick(j *lint.Job) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		if j.IsInMain(ssafn) || j.IsInTest(ssafn) {
+			continue
+		}
+		for _, block := range ssafn.Blocks {
+			for _, ins := range block.Instrs {
+				call, ok := ins.(*ssa.Call)
+				if !ok || !lint.IsCallTo(call.Common(), "time.Tick") {
+					continue
+				}
+				// A ticker in a function that never returns does
+				// not effectively leak.
+				if c.funcDescs.Get(call.Parent()).Infinite {
+					continue
+				}
+				j.Errorf(call, "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here")
+			}
+		}
+	}
+}
+
+// CheckDoubleNegation flags `!!x` — two immediately nested boolean
+// negations, which cancel out and are likely a typo.
+func (c *Checker) CheckDoubleNegation(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		unary1, ok := node.(*ast.UnaryExpr)
+		if !ok {
+			return true
+		}
+		unary2, ok := unary1.X.(*ast.UnaryExpr)
+		if !ok {
+			return true
+		}
+		if unary1.Op != token.NOT || unary2.Op != token.NOT {
+			return true
+		}
+		j.Errorf(unary1, "negating a boolean twice has no effect; is this a typo?")
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// hasSideEffects conservatively reports whether evaluating node could
+// have side effects: any function call or channel receive counts.
+func hasSideEffects(node ast.Node) bool {
+	dynamic := false
+	ast.Inspect(node, func(node ast.Node) bool {
+		switch node := node.(type) {
+		case *ast.CallExpr:
+			dynamic = true
+			return false
+		case *ast.UnaryExpr:
+			// <-ch is a receive operation.
+			if node.Op == token.ARROW {
+				dynamic = true
+				return false
+			}
+		}
+		return true
+	})
+	return dynamic
+}
+
+// CheckRepeatedIfElse flags if/else-if chains in which the same
+// (side-effect-free, textually identical) condition appears more than
+// once; later occurrences are dead.
+func (c *Checker) CheckRepeatedIfElse(j *lint.Job) {
+	// seen marks if-statements already visited as part of an
+	// else-if chain, so they aren't re-examined as chain heads.
+	seen := map[ast.Node]bool{}
+
+	// collectConds walks an else-if chain, accumulating each
+	// branch's init statement and condition.
+	var collectConds func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr)
+	collectConds = func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr) {
+		seen[ifstmt] = true
+		if ifstmt.Init != nil {
+			inits = append(inits, ifstmt.Init)
+		}
+		conds = append(conds, ifstmt.Cond)
+		if elsestmt, ok := ifstmt.Else.(*ast.IfStmt); ok {
+			return collectConds(elsestmt, inits, conds)
+		}
+		return inits, conds
+	}
+	fn := func(node ast.Node) bool {
+		ifstmt, ok := node.(*ast.IfStmt)
+		if !ok {
+			return true
+		}
+		if seen[ifstmt] {
+			return true
+		}
+		inits, conds := collectConds(ifstmt, nil, nil)
+		// Init statements may change what a repeated condition
+		// evaluates to, so bail out if any exist.
+		if len(inits) > 0 {
+			return true
+		}
+		for _, cond := range conds {
+			if hasSideEffects(cond) {
+				return true
+			}
+		}
+		// Compare conditions by their rendered source text.
+		counts := map[string]int{}
+		for _, cond := range conds {
+			s := j.Render(cond)
+			counts[s]++
+			if counts[s] == 2 {
+				j.Errorf(cond, "this condition occurs multiple times in this if/else if chain")
+			}
+		}
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// CheckSillyBitwiseOps flags bitwise AND/OR/XOR with the constant 0,
+// which are no-ops (`x | 0`, `x ^ 0`) or always zero (`x & 0`).
+func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		for _, block := range ssafn.Blocks {
+			for _, ins := range block.Instrs {
+				ins, ok := ins.(*ssa.BinOp)
+				if !ok {
+					continue
+				}
+
+				if c, ok := ins.Y.(*ssa.Const); !ok || c.Value == nil || c.Value.Kind() != constant.Int || c.Uint64() != 0 {
+					continue
+				}
+				switch ins.Op {
+				case token.AND, token.OR, token.XOR:
+				default:
+					// we do not flag shifts because too often, x<<0 is part
+					// of a pattern, x<<0, x<<8, x<<16, ...
+					continue
+				}
+				// Confirm the 0 is a literal in the source, not a
+				// constant that happens to evaluate to 0 (which may
+				// be deliberate, e.g. a flag set to zero).
+				path, _ := astutil.PathEnclosingInterval(j.File(ins), ins.Pos(), ins.Pos())
+				if len(path) == 0 {
+					continue
+				}
+				if node, ok := path[0].(*ast.BinaryExpr); !ok || !lint.IsZero(node.Y) {
+					continue
+				}
+
+				switch ins.Op {
+				case token.AND:
+					j.Errorf(ins, "x & 0 always equals 0")
+				case token.OR, token.XOR:
+					j.Errorf(ins, "x %s 0 always equals x", ins.Op)
+				}
+			}
+		}
+	}
+}
+
+// CheckNonOctalFileMode flags decimal integer literals passed as
+// os.FileMode arguments that look like octal permissions (e.g. 644
+// instead of 0644).
+func (c *Checker) CheckNonOctalFileMode(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		call, ok := node.(*ast.CallExpr)
+		if !ok {
+			return true
+		}
+		sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature)
+		if !ok {
+			return true
+		}
+		// Collect the indices of all os.FileMode parameters.
+		n := sig.Params().Len()
+		var args []int
+		for i := 0; i < n; i++ {
+			typ := sig.Params().At(i).Type()
+			if types.TypeString(typ, nil) == "os.FileMode" {
+				args = append(args, i)
+			}
+		}
+		for _, i := range args {
+			lit, ok := call.Args[i].(*ast.BasicLit)
+			if !ok {
+				continue
+			}
+			// Match three-digit literals not starting with '0'
+			// whose digits are all valid octal digits.
+			if len(lit.Value) == 3 &&
+				lit.Value[0] != '0' &&
+				lit.Value[0] >= '0' && lit.Value[0] <= '7' &&
+				lit.Value[1] >= '0' && lit.Value[1] <= '7' &&
+				lit.Value[2] >= '0' && lit.Value[2] <= '7' {
+
+				// Parse as decimal: that is how the compiler reads
+				// the literal, and %#o shows what it really means.
+				v, err := strconv.ParseInt(lit.Value, 10, 64)
+				if err != nil {
+					continue
+				}
+				j.Errorf(call.Args[i], "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value)
+			}
+		}
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// CheckPureFunctions flags calls to functions known to be pure (per
+// the function-description analysis) whose return value is discarded
+// — such calls have no effect.
+func (c *Checker) CheckPureFunctions(j *lint.Job) {
+fnLoop:
+	for _, ssafn := range j.Program.InitialFunctions {
+		if j.IsInTest(ssafn) {
+			params := ssafn.Signature.Params()
+			for i := 0; i < params.Len(); i++ {
+				param := params.At(i)
+				if types.TypeString(param.Type(), nil) == "*testing.B" {
+					// Ignore discarded pure functions in code related
+					// to benchmarks. Instead of matching BenchmarkFoo
+					// functions, we match any function accepting a
+					// *testing.B. Benchmarks sometimes call generic
+					// functions for doing the actual work, and
+					// checking for the parameter is a lot easier and
+					// faster than analyzing call trees.
+					continue fnLoop
+				}
+			}
+		}
+
+		for _, b := range ssafn.Blocks {
+			for _, ins := range b.Instrs {
+				ins, ok := ins.(*ssa.Call)
+				if !ok {
+					continue
+				}
+				// A call whose result has no non-debug referrers is
+				// a discarded result.
+				refs := ins.Referrers()
+				if refs == nil || len(lint.FilterDebug(*refs)) > 0 {
+					continue
+				}
+				callee := ins.Common().StaticCallee()
+				if callee == nil {
+					continue
+				}
+				// Stubs are assumed pure but bodies are missing, so
+				// don't flag them.
+				if c.funcDescs.Get(callee).Pure && !c.funcDescs.Get(callee).Stub {
+					j.Errorf(ins, "%s is a pure function but its return value is ignored", callee.Name())
+					continue
+				}
+			}
+		}
+	}
+}
+
+// isDeprecated reports whether ident refers to a deprecated object,
+// and if so returns the deprecation message recorded in
+// c.deprecatedObjs.
+//
+// NOTE(review): ObjectOf may return nil for unresolved identifiers,
+// in which case obj.Pkg() would panic — presumably callers only pass
+// resolved selectors; confirm.
+func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) {
+	obj := j.Program.Info.ObjectOf(ident)
+	if obj.Pkg() == nil {
+		return false, ""
+	}
+	alt := c.deprecatedObjs[obj]
+	return alt != "", alt
+}
+
+// selectorName returns a stable name for a selector expression:
+// "(recv).Name" for method/field selections, "pkgpath.Name" for
+// package-qualified identifiers. It panics on selectors it cannot
+// classify.
+func selectorName(j *lint.Job, expr *ast.SelectorExpr) string {
+	sel := j.Program.Info.Selections[expr]
+	if sel == nil {
+		// Not a selection: expr.X should be a package name.
+		if x, ok := expr.X.(*ast.Ident); ok {
+			pkg, ok := j.Program.Info.ObjectOf(x).(*types.PkgName)
+			if !ok {
+				// This shouldn't happen
+				return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name)
+			}
+			return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name)
+		}
+		panic(fmt.Sprintf("unsupported selector: %v", expr))
+	}
+	return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
+}
+
+// enclosingFunc returns the outermost SSA function containing sel
+// (walking up through closures), or nil if sel is not associated with
+// any function.
+func (c *Checker) enclosingFunc(sel *ast.SelectorExpr) *ssa.Function {
+	fn := c.nodeFns[sel]
+	if fn == nil {
+		return nil
+	}
+	for fn.Parent() != nil {
+		fn = fn.Parent()
+	}
+	return fn
+}
+
+// CheckDeprecated flags uses of deprecated identifiers from other
+// packages, provided the targeted Go version already offers the
+// documented alternative. Deprecated code referencing other
+// deprecated code is not flagged.
+func (c *Checker) CheckDeprecated(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		sel, ok := node.(*ast.SelectorExpr)
+		if !ok {
+			return true
+		}
+
+		obj := j.Program.Info.ObjectOf(sel.Sel)
+		if obj.Pkg() == nil {
+			return true
+		}
+		nodePkg := j.NodePackage(node).Pkg
+		if nodePkg == obj.Pkg() || obj.Pkg().Path()+"_test" == nodePkg.Path() {
+			// Don't flag stuff in our own package
+			return true
+		}
+		if ok, alt := c.isDeprecated(j, sel.Sel); ok {
+			// Look for the first available alternative, not the first
+			// version something was deprecated in. If a function was
+			// deprecated in Go 1.6, an alternative has been available
+			// already in 1.0, and we're targetting 1.2, it still
+			// makes sense to use the alternative from 1.0, to be
+			// future-proof.
+			minVersion := deprecated.Stdlib[selectorName(j, sel)].AlternativeAvailableSince
+			if !j.IsGoVersion(minVersion) {
+				return true
+			}
+
+			if fn := c.enclosingFunc(sel); fn != nil {
+				if _, ok := c.deprecatedObjs[fn.Object()]; ok {
+					// functions that are deprecated may use deprecated
+					// symbols
+					return true
+				}
+			}
+			j.Errorf(sel, "%s is deprecated: %s", j.Render(sel), alt)
+			return true
+		}
+		return true
+	}
+	for _, f := range j.Program.Files {
+		ast.Inspect(f, fn)
+	}
+}
+
+// callChecker adapts a set of per-function call rules into a lint
+// check function, deferring the work to checkCalls.
+func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) {
+	return func(j *lint.Job) {
+		c.checkCalls(j, rules)
+	}
+}
+
+// checkCalls walks the call graph and, for each call site whose
+// callee has a rule in rules (keyed by the callee's full name),
+// builds a Call with VRP-annotated arguments, runs the rule, and
+// reports any problems the rule recorded via Invalid.
+func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) {
+	for _, ssafn := range j.Program.InitialFunctions {
+		node := c.funcDescs.CallGraph.CreateNode(ssafn)
+		for _, edge := range node.Out {
+			callee := edge.Callee.Func
+			obj, ok := callee.Object().(*types.Func)
+			if !ok {
+				continue
+			}
+
+			r, ok := rules[obj.FullName()]
+			if !ok {
+				continue
+			}
+			var args []*Argument
+			ssaargs := edge.Site.Common().Args
+			// Drop the receiver so Args aligns with the declared
+			// parameters.
+			if callee.Signature.Recv() != nil {
+				ssaargs = ssaargs[1:]
+			}
+			for _, arg := range ssaargs {
+				// Unwrap interface conversions to see the concrete
+				// argument value.
+				if iarg, ok := arg.(*ssa.MakeInterface); ok {
+					arg = iarg.X
+				}
+				vr := c.funcDescs.Get(edge.Site.Parent()).Ranges[arg]
+				args = append(args, &Argument{Value: Value{arg, vr}})
+			}
+			call := &Call{
+				Job:     j,
+				Instr:   edge.Site,
+				Args:    args,
+				Checker: c,
+				Parent:  edge.Site.Parent(),
+			}
+			r(call)
+			// Per-argument problems are reported at the call site;
+			// the commented-out code would have pointed at the
+			// specific AST argument instead.
+			for idx, arg := range call.Args {
+				_ = idx
+				for _, e := range arg.invalids {
+					// path, _ := astutil.PathEnclosingInterval(f.File, edge.Site.Pos(), edge.Site.Pos())
+					// if len(path) < 2 {
+					// 	continue
+					// }
+					// astcall, ok := path[0].(*ast.CallExpr)
+					// if !ok {
+					// 	continue
+					// }
+					// j.Errorf(astcall.Args[idx], "%s", e)
+
+					j.Errorf(edge.Site, "%s", e)
+				}
+			}
+			for _, e := range call.invalids {
+				j.Errorf(call.Instr.Common(), "%s", e)
+			}
+		}
+	}
+}
+
+// unwrapFunction returns the *ssa.Function behind val, looking
+// through MakeClosure wrappers; it returns nil for dynamic values.
+func unwrapFunction(val ssa.Value) *ssa.Function {
+	switch val := val.(type) {
+	case *ssa.Function:
+		return val
+	case *ssa.MakeClosure:
+		return val.Fn.(*ssa.Function)
+	default:
+		return nil
+	}
+}
+
+// shortCallName returns the unqualified name of the statically-known
+// function or builtin being called, or "" for interface invocations
+// and dynamic calls.
+func shortCallName(call *ssa.CallCommon) string {
+	if call.IsInvoke() {
+		return ""
+	}
+	switch v := call.Value.(type) {
+	case *ssa.Function:
+		fn, ok := v.Object().(*types.Func)
+		if !ok {
+			return ""
+		}
+		return fn.Name()
+	case *ssa.Builtin:
+		return v.Name()
+	}
+	return ""
+}
+
+// hasCallTo reports whether block contains a call to the function
+// identified by name (per lint.IsCallTo).
+func hasCallTo(block *ssa.BasicBlock, name string) bool {
+	for _, ins := range block.Instrs {
+		call, ok := ins.(*ssa.Call)
+		if !ok {
+			continue
+		}
+		if lint.IsCallTo(call.Common(), name) {
+			return true
+		}
+	}
+	return false
+}
+
+// deref returns a pointer's element type; otherwise it returns typ.
+// Note that it looks at the underlying type, so named pointer types
+// are dereferenced too.
+func deref(typ types.Type) types.Type {
+	if p, ok := typ.Underlying().(*types.Pointer); ok {
+		return p.Elem()
+	}
+	return typ
+}
+
+// CheckWriterBufferModified flags Write methods matching the
+// io.Writer signature that store into, or append to, the provided
+// buffer — io.Writer implementations must not modify the slice they
+// are handed, even temporarily.
+func (c *Checker) CheckWriterBufferModified(j *lint.Job) {
+	// TODO(dh): this might be a good candidate for taint analysis.
+	// Taint the argument as MUST_NOT_MODIFY, then propagate that
+	// through functions like bytes.Split
+
+	for _, ssafn := range j.Program.InitialFunctions {
+		// Match exactly: method named Write with signature
+		// func ([]byte) (int, error).
+		sig := ssafn.Signature
+		if ssafn.Name() != "Write" || sig.Recv() == nil || sig.Params().Len() != 1 || sig.Results().Len() != 2 {
+			continue
+		}
+		tArg, ok := sig.Params().At(0).Type().(*types.Slice)
+		if !ok {
+			continue
+		}
+		if basic, ok := tArg.Elem().(*types.Basic); !ok || basic.Kind() != types.Byte {
+			continue
+		}
+		if basic, ok := sig.Results().At(0).Type().(*types.Basic); !ok || basic.Kind() != types.Int {
+			continue
+		}
+		if named, ok := sig.Results().At(1).Type().(*types.Named); !ok || types.TypeString(named, nil) != "error" {
+			continue
+		}
+
+		for _, block := range ssafn.Blocks {
+			for _, ins := range block.Instrs {
+				switch ins := ins.(type) {
+				case *ssa.Store:
+					addr, ok := ins.Addr.(*ssa.IndexAddr)
+					if !ok {
+						continue
+					}
+					// Params[0] is the receiver; Params[1] is the
+					// buffer argument.
+					if addr.X != ssafn.Params[1] {
+						continue
+					}
+					j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily")
+				case *ssa.Call:
+					if !lint.IsCallTo(ins.Common(), "append") {
+						continue
+					}
+					if ins.Common().Args[0] != ssafn.Params[1] {
+						continue
+					}
+					j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily")
+				}
+			}
+		}
+	}
+}
+
+// loopedRegexp returns a CallCheck that flags calls to name (a
+// regexp-compiling function) with a constant pattern inside a loop,
+// where the compilation should be hoisted out.
+func loopedRegexp(name string) CallCheck {
+	return func(call *Call) {
+		// Only constant patterns are worth hoisting; dynamic ones
+		// may differ per iteration.
+		if len(extractConsts(call.Args[0].Value.Value)) == 0 {
+			return
+		}
+		if !call.Checker.isInLoop(call.Instr.Block()) {
+			return
+		}
+		call.Invalid(fmt.Sprintf("calling %s in a loop has poor performance, consider using regexp.Compile", name))
+	}
+}
+
+// CheckEmptyBranch flags if statements with an empty body or an empty
+// else block. Example functions and generated files are skipped.
+func (c *Checker) CheckEmptyBranch(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		ifstmt, ok := node.(*ast.IfStmt)
+		if !ok {
+			return true
+		}
+		ssafn := c.nodeFns[node]
+		if lint.IsExample(ssafn) {
+			return true
+		}
+		if ifstmt.Else != nil {
+			// Only a literal empty block counts; `else if` is an
+			// *ast.IfStmt and is handled on its own visit.
+			b, ok := ifstmt.Else.(*ast.BlockStmt)
+			if !ok || len(b.List) != 0 {
+				return true
+			}
+			j.Errorf(ifstmt.Else, "empty branch")
+		}
+		if len(ifstmt.Body.List) != 0 {
+			return true
+		}
+		j.Errorf(ifstmt, "empty branch")
+		return true
+	}
+	for _, f := range c.filterGenerated(j.Program.Files) {
+		ast.Inspect(f, fn)
+	}
+}
+
+// CheckMapBytesKey flags the pattern `k := string(b); m[k]` where the
+// string conversion is used only for map lookups; writing
+// `m[string(b)]` lets the compiler elide the conversion's allocation.
+func (c *Checker) CheckMapBytesKey(j *lint.Job) {
+	for _, fn := range j.Program.InitialFunctions {
+		for _, b := range fn.Blocks {
+		insLoop:
+			for _, ins := range b.Instrs {
+				// find []byte -> string conversions
+				conv, ok := ins.(*ssa.Convert)
+				if !ok || conv.Type() != types.Universe.Lookup("string").Type() {
+					continue
+				}
+				if s, ok := conv.X.Type().(*types.Slice); !ok || s.Elem() != types.Universe.Lookup("byte").Type() {
+					continue
+				}
+				refs := conv.Referrers()
+				// need at least two (DebugRef) references: the
+				// conversion and the *ast.Ident
+				if refs == nil || len(*refs) < 2 {
+					continue
+				}
+				ident := false
+				// skip first reference, that's the conversion itself
+				for _, ref := range (*refs)[1:] {
+					switch ref := ref.(type) {
+					case *ssa.DebugRef:
+						if _, ok := ref.Expr.(*ast.Ident); !ok {
+							// the string seems to be used somewhere
+							// unexpected; the default branch should
+							// catch this already, but be safe
+							continue insLoop
+						} else {
+							ident = true
+						}
+					case *ssa.Lookup:
+						// map lookups are the one allowed use
+					default:
+						// the string is used somewhere else than a
+						// map lookup
+						continue insLoop
+					}
+				}
+
+				// the result of the conversion wasn't assigned to an
+				// identifier
+				if !ident {
+					continue
+				}
+				j.Errorf(conv, "m[string(key)] would be more efficient than k := string(key); m[k]")
+			}
+		}
+	}
+}
+
+// CheckRangeStringRunes delegates to the shared check for ranging
+// over []rune(s) instead of ranging over the string directly.
+func (c *Checker) CheckRangeStringRunes(j *lint.Job) {
+	sharedcheck.CheckRangeStringRunes(c.nodeFns, j)
+}
+
+// CheckSelfAssignment flags assignments whose left- and right-hand
+// sides render to identical source text (`x = x`). Generated files
+// are skipped.
+func (c *Checker) CheckSelfAssignment(j *lint.Job) {
+	fn := func(node ast.Node) bool {
+		assign, ok := node.(*ast.AssignStmt)
+		if !ok {
+			return true
+		}
+		// Only plain `=` with matching arity; `:=` declares new
+		// variables and cannot be a self-assignment.
+		if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) {
+			return true
+		}
+		for i, stmt := range assign.Lhs {
+			rlh := j.Render(stmt)
+			rrh := j.Render(assign.Rhs[i])
+			if rlh == rrh {
+				j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh)
+			}
+		}
+		return true
+	}
+	for _, f := range c.filterGenerated(j.Program.Files) {
+		ast.Inspect(f, fn)
+	}
+}
+
+// buildTagsIdentical reports whether s1 and s2 contain the same build
+// tags, ignoring order. The inputs are copied before sorting so the
+// callers' slices are left untouched.
+func buildTagsIdentical(s1, s2 []string) bool {
+	if len(s1) != len(s2) {
+		return false
+	}
+	s1s := make([]string, len(s1))
+	copy(s1s, s1)
+	sort.Strings(s1s)
+	s2s := make([]string, len(s2))
+	copy(s2s, s2)
+	sort.Strings(s2s)
+	for i, s := range s1s {
+		if s != s2s[i] {
+			return false
+		}
+	}
+	return true
+}
+
+// CheckDuplicateBuildConstraints flags files that repeat the same
+// build constraint line (same tags, any order) more than once.
+func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) {
+	for _, f := range c.filterGenerated(job.Program.Files) {
+		constraints := buildTags(f)
+		// Compare every unordered pair of constraints exactly once
+		// (i < j).
+		for i, constraint1 := range constraints {
+			for j, constraint2 := range constraints {
+				if i >= j {
+					continue
+				}
+				if buildTagsIdentical(constraint1, constraint2) {
+					job.Errorf(f, "identical build constraints %q and %q",
+						strings.Join(constraint1, " "),
+						strings.Join(constraint2, " "))
+				}
+			}
+		}
+	}
+}
diff --git a/vendor/honnef.co/go/tools/staticcheck/rules.go b/vendor/honnef.co/go/tools/staticcheck/rules.go
new file mode 100644
index 0000000..60cc00c
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/rules.go
@@ -0,0 +1,321 @@
+package staticcheck
+
+import (
+ "fmt"
+ "go/constant"
+ "go/types"
+ "net"
+ "net/url"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+ "unicode/utf8"
+
+ "honnef.co/go/tools/lint"
+ "honnef.co/go/tools/ssa"
+ "honnef.co/go/tools/staticcheck/vrp"
+)
+
+const (
+ MsgInvalidHostPort = "invalid port or service name in host:port pair"
+ MsgInvalidUTF8 = "argument is not a valid UTF-8 encoded string"
+ MsgNonUniqueCutset = "cutset contains duplicate characters"
+)
+
+type Call struct {
+ Job *lint.Job
+ Instr ssa.CallInstruction
+ Args []*Argument
+
+ Checker *Checker
+ Parent *ssa.Function
+
+ invalids []string
+}
+
+func (c *Call) Invalid(msg string) {
+ c.invalids = append(c.invalids, msg)
+}
+
+type Argument struct {
+ Value Value
+ invalids []string
+}
+
+func (arg *Argument) Invalid(msg string) {
+ arg.invalids = append(arg.invalids, msg)
+}
+
+type Value struct {
+ Value ssa.Value
+ Range vrp.Range
+}
+
+type CallCheck func(call *Call)
+
+// extractConsts returns the SSA constants that v evaluates to. It looks
+// through MakeInterface wrappers (an interface holding a constant);
+// any other kind of value yields nil.
+func extractConsts(v ssa.Value) []*ssa.Const {
+	switch v := v.(type) {
+	case *ssa.Const:
+		return []*ssa.Const{v}
+	case *ssa.MakeInterface:
+		return extractConsts(v.X)
+	default:
+		return nil
+	}
+}
+
+func ValidateRegexp(v Value) error {
+ for _, c := range extractConsts(v.Value) {
+ if c.Value == nil {
+ continue
+ }
+ if c.Value.Kind() != constant.String {
+ continue
+ }
+ s := constant.StringVal(c.Value)
+ if _, err := regexp.Compile(s); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// ValidateTimeLayout checks that every constant string reaching v is a
+// layout accepted by time.Parse. A layout is checked by parsing it
+// against itself; the first failure is returned. Non-string and
+// non-constant values are ignored.
+func ValidateTimeLayout(v Value) error {
+	for _, c := range extractConsts(v.Value) {
+		if c.Value == nil {
+			continue
+		}
+		if c.Value.Kind() != constant.String {
+			continue
+		}
+		s := constant.StringVal(c.Value)
+		// Rewrite placeholders that would make the layout fail to
+		// parse against itself: "_" (space-padded day) becomes a
+		// space, and "Z" (ISO zone marker) becomes "-" — presumably
+		// so the self-parse below round-trips; confirm against the
+		// time package's reference-layout rules.
+		s = strings.Replace(s, "_", " ", -1)
+		s = strings.Replace(s, "Z", "-", -1)
+		_, err := time.Parse(s, s)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func ValidateURL(v Value) error {
+ for _, c := range extractConsts(v.Value) {
+ if c.Value == nil {
+ continue
+ }
+ if c.Value.Kind() != constant.String {
+ continue
+ }
+ s := constant.StringVal(c.Value)
+ _, err := url.Parse(s)
+ if err != nil {
+ return fmt.Errorf("%q is not a valid URL: %s", s, err)
+ }
+ }
+ return nil
+}
+
+// IntValue reports whether v's inferred range is exactly the single
+// integer z, i.e. a known interval whose lower and upper bounds both
+// equal z.
+func IntValue(v Value, z vrp.Z) bool {
+	r, ok := v.Range.(vrp.IntInterval)
+	if !ok || !r.IsKnown() {
+		return false
+	}
+	if r.Lower != r.Upper {
+		return false
+	}
+	if r.Lower.Cmp(z) == 0 {
+		return true
+	}
+	return false
+}
+
+func InvalidUTF8(v Value) bool {
+ for _, c := range extractConsts(v.Value) {
+ if c.Value == nil {
+ continue
+ }
+ if c.Value.Kind() != constant.String {
+ continue
+ }
+ s := constant.StringVal(c.Value)
+ if !utf8.ValidString(s) {
+ return true
+ }
+ }
+ return false
+}
+
+func UnbufferedChannel(v Value) bool {
+ r, ok := v.Range.(vrp.ChannelInterval)
+ if !ok || !r.IsKnown() {
+ return false
+ }
+ if r.Size.Lower.Cmp(vrp.NewZ(0)) == 0 &&
+ r.Size.Upper.Cmp(vrp.NewZ(0)) == 0 {
+ return true
+ }
+ return false
+}
+
+func Pointer(v Value) bool {
+ switch v.Value.Type().Underlying().(type) {
+ case *types.Pointer, *types.Interface:
+ return true
+ }
+ return false
+}
+
+func ConvertedFromInt(v Value) bool {
+ conv, ok := v.Value.(*ssa.Convert)
+ if !ok {
+ return false
+ }
+ b, ok := conv.X.Type().Underlying().(*types.Basic)
+ if !ok {
+ return false
+ }
+ if (b.Info() & types.IsInteger) == 0 {
+ return false
+ }
+ return true
+}
+
+// validEncodingBinaryType reports whether typ has a fixed size suitable
+// for encoding/binary: fixed-size numeric basics, and structs/arrays
+// composed of them. Interfaces are accepted because the concrete type
+// cannot be determined statically. bool is only accepted for Go 1.8+,
+// the version that added bool support to encoding/binary.
+func validEncodingBinaryType(j *lint.Job, typ types.Type) bool {
+	typ = typ.Underlying()
+	switch typ := typ.(type) {
+	case *types.Basic:
+		switch typ.Kind() {
+		case types.Uint8, types.Uint16, types.Uint32, types.Uint64,
+			types.Int8, types.Int16, types.Int32, types.Int64,
+			types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid:
+			return true
+		case types.Bool:
+			return j.IsGoVersion(8)
+		}
+		return false
+	case *types.Struct:
+		// A struct is valid only if every field is.
+		n := typ.NumFields()
+		for i := 0; i < n; i++ {
+			if !validEncodingBinaryType(j, typ.Field(i).Type()) {
+				return false
+			}
+		}
+		return true
+	case *types.Array:
+		return validEncodingBinaryType(j, typ.Elem())
+	case *types.Interface:
+		// we can't determine if it's a valid type or not
+		return true
+	}
+	return false
+}
+
+// CanBinaryMarshal reports whether v's type can be passed to
+// encoding/binary's Read/Write. It unwraps one level of pointer, then
+// one level of element type (covering slices and similar Elem()-bearing
+// types, but not pointers-to-pointers), before validating the resulting
+// type's fixed-size layout.
+func CanBinaryMarshal(j *lint.Job, v Value) bool {
+	typ := v.Value.Type().Underlying()
+	if ttyp, ok := typ.(*types.Pointer); ok {
+		typ = ttyp.Elem().Underlying()
+	}
+	// Any type exposing Elem() (slice, array, chan, ...) except a
+	// pointer contributes its element type instead.
+	if ttyp, ok := typ.(interface {
+		Elem() types.Type
+	}); ok {
+		if _, ok := ttyp.(*types.Pointer); !ok {
+			typ = ttyp.Elem()
+		}
+	}
+
+	return validEncodingBinaryType(j, typ)
+}
+
+func RepeatZeroTimes(name string, arg int) CallCheck {
+ return func(call *Call) {
+ arg := call.Args[arg]
+ if IntValue(arg.Value, vrp.NewZ(0)) {
+ arg.Invalid(fmt.Sprintf("calling %s with n == 0 will return no results, did you mean -1?", name))
+ }
+ }
+}
+
+// validateServiceName reports whether s is a plausible service name for
+// a host:port pair: 1-15 characters, only ASCII letters, digits and
+// hyphens, at least one letter, no leading/trailing hyphen and no "--".
+// (These rules mirror the RFC 6335 service-name syntax — TODO confirm.)
+func validateServiceName(s string) bool {
+	if len(s) < 1 || len(s) > 15 {
+		return false
+	}
+	if s[0] == '-' || s[len(s)-1] == '-' {
+		return false
+	}
+	if strings.Contains(s, "--") {
+		return false
+	}
+	hasLetter := false
+	for _, r := range s {
+		if (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') {
+			hasLetter = true
+			continue
+		}
+		if r >= '0' && r <= '9' {
+			continue
+		}
+		// Any other character disqualifies the name.
+		return false
+	}
+	return hasLetter
+}
+
+// validatePort reports whether s is a valid port: either a decimal
+// number in [0, 65535], or — if it does not parse as a number — a
+// valid service name such as "http".
+func validatePort(s string) bool {
+	n, err := strconv.ParseInt(s, 10, 64)
+	if err != nil {
+		return validateServiceName(s)
+	}
+	return n >= 0 && n <= 65535
+}
+
+// ValidHostPort reports whether every constant string reaching v is a
+// well-formed "host:port" pair with a valid port or service name. It
+// returns false on the first invalid constant; values that are not
+// string constants are skipped.
+func ValidHostPort(v Value) bool {
+	for _, k := range extractConsts(v.Value) {
+		if k.Value == nil {
+			continue
+		}
+		if k.Value.Kind() != constant.String {
+			continue
+		}
+		s := constant.StringVal(k.Value)
+		_, port, err := net.SplitHostPort(s)
+		if err != nil {
+			return false
+		}
+		// TODO(dh): check hostname
+		if !validatePort(port) {
+			return false
+		}
+	}
+	return true
+}
+
+// ConvertedFrom reports whether value v was converted from type typ.
+func ConvertedFrom(v Value, typ string) bool {
+ change, ok := v.Value.(*ssa.ChangeType)
+ return ok && types.TypeString(change.X.Type(), nil) == typ
+}
+
+// UniqueStringCutset reports whether every constant string reaching v
+// contains no duplicate runes. It is used to validate cutset arguments
+// (e.g. for strings.Trim): duplicates are detected by sorting the runes
+// and comparing neighbors.
+func UniqueStringCutset(v Value) bool {
+	for _, c := range extractConsts(v.Value) {
+		if c.Value == nil {
+			continue
+		}
+		if c.Value.Kind() != constant.String {
+			continue
+		}
+		s := constant.StringVal(c.Value)
+		rs := runeSlice(s)
+		// Fewer than two runes cannot contain a duplicate.
+		if len(rs) < 2 {
+			continue
+		}
+		sort.Sort(rs)
+		for i, r := range rs[1:] {
+			// rs[i] is the element immediately before r after sorting.
+			if rs[i] == r {
+				return false
+			}
+		}
+	}
+	return true
+}
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go b/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go
new file mode 100644
index 0000000..c8fbacb
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go
@@ -0,0 +1,73 @@
+package vrp
+
+import (
+ "fmt"
+
+ "honnef.co/go/tools/ssa"
+)
+
+type ChannelInterval struct {
+ Size IntInterval
+}
+
+func (c ChannelInterval) Union(other Range) Range {
+ i, ok := other.(ChannelInterval)
+ if !ok {
+ i = ChannelInterval{EmptyIntInterval}
+ }
+ if c.Size.Empty() || !c.Size.IsKnown() {
+ return i
+ }
+ if i.Size.Empty() || !i.Size.IsKnown() {
+ return c
+ }
+ return ChannelInterval{
+ Size: c.Size.Union(i.Size).(IntInterval),
+ }
+}
+
+func (c ChannelInterval) String() string {
+ return c.Size.String()
+}
+
+func (c ChannelInterval) IsKnown() bool {
+ return c.Size.IsKnown()
+}
+
+type MakeChannelConstraint struct {
+ aConstraint
+ Buffer ssa.Value
+}
+type ChannelChangeTypeConstraint struct {
+ aConstraint
+ X ssa.Value
+}
+
+func NewMakeChannelConstraint(buffer, y ssa.Value) Constraint {
+ return &MakeChannelConstraint{NewConstraint(y), buffer}
+}
+func NewChannelChangeTypeConstraint(x, y ssa.Value) Constraint {
+ return &ChannelChangeTypeConstraint{NewConstraint(y), x}
+}
+
+func (c *MakeChannelConstraint) Operands() []ssa.Value { return []ssa.Value{c.Buffer} }
+func (c *ChannelChangeTypeConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+
+// String renders the constraint as "y = make(chan, buffer)".
+func (c *MakeChannelConstraint) String() string {
+	// Name is a method: it must be called. The bare method value
+	// c.Y().Name would be formatted by %s as a func value
+	// (go vet's printf check flags this); every sibling String()
+	// method calls Name().
+	return fmt.Sprintf("%s = make(chan, %s)", c.Y().Name(), c.Buffer.Name())
+}
+// String renders the constraint as "y = changetype(x)".
+func (c *ChannelChangeTypeConstraint) String() string {
+	// c.Y().Name was missing its call parentheses; %s would print a
+	// func value instead of the SSA value's name.
+	return fmt.Sprintf("%s = changetype(%s)", c.Y().Name(), c.X.Name())
+}
+
+func (c *MakeChannelConstraint) Eval(g *Graph) Range {
+ i, ok := g.Range(c.Buffer).(IntInterval)
+ if !ok {
+ return ChannelInterval{NewIntInterval(NewZ(0), PInfinity)}
+ }
+ if i.Lower.Sign() == -1 {
+ i.Lower = NewZ(0)
+ }
+ return ChannelInterval{i}
+}
+func (c *ChannelChangeTypeConstraint) Eval(g *Graph) Range { return g.Range(c.X) }
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/int.go b/vendor/honnef.co/go/tools/staticcheck/vrp/int.go
new file mode 100644
index 0000000..926bb7a
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/int.go
@@ -0,0 +1,476 @@
+package vrp
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "math/big"
+
+ "honnef.co/go/tools/ssa"
+)
+
+type Zs []Z
+
+func (zs Zs) Len() int {
+ return len(zs)
+}
+
+func (zs Zs) Less(i int, j int) bool {
+ return zs[i].Cmp(zs[j]) == -1
+}
+
+func (zs Zs) Swap(i int, j int) {
+ zs[i], zs[j] = zs[j], zs[i]
+}
+
+type Z struct {
+ infinity int8
+ integer *big.Int
+}
+
+func NewZ(n int64) Z {
+ return NewBigZ(big.NewInt(n))
+}
+
+func NewBigZ(n *big.Int) Z {
+ return Z{integer: n}
+}
+
+func (z1 Z) Infinite() bool {
+ return z1.infinity != 0
+}
+
+// Add returns z1 + z2 in the extended integers. Adding a negative z2 is
+// rewritten as subtraction so the infinity cases below only deal with
+// non-negative addends. Panics on combinations left undefined by the
+// case analysis.
+func (z1 Z) Add(z2 Z) Z {
+	if z2.Sign() == -1 {
+		return z1.Sub(z2.Negate())
+	}
+	// An infinite operand dominates a finite (non-negative) one.
+	if z1 == NInfinity {
+		return NInfinity
+	}
+	if z1 == PInfinity {
+		return PInfinity
+	}
+	if z2 == PInfinity {
+		return PInfinity
+	}
+
+	if !z1.Infinite() && !z2.Infinite() {
+		n := &big.Int{}
+		n.Add(z1.integer, z2.integer)
+		return NewBigZ(n)
+	}
+
+	panic(fmt.Sprintf("%s + %s is not defined", z1, z2))
+}
+
+func (z1 Z) Sub(z2 Z) Z {
+ if z2.Sign() == -1 {
+ return z1.Add(z2.Negate())
+ }
+ if !z1.Infinite() && !z2.Infinite() {
+ n := &big.Int{}
+ n.Sub(z1.integer, z2.integer)
+ return NewBigZ(n)
+ }
+
+ if z1 != PInfinity && z2 == PInfinity {
+ return NInfinity
+ }
+ if z1.Infinite() && !z2.Infinite() {
+ return Z{infinity: z1.infinity}
+ }
+ if z1 == PInfinity && z2 == PInfinity {
+ return PInfinity
+ }
+ panic(fmt.Sprintf("%s - %s is not defined", z1, z2))
+}
+
+func (z1 Z) Mul(z2 Z) Z {
+ if (z1.integer != nil && z1.integer.Sign() == 0) ||
+ (z2.integer != nil && z2.integer.Sign() == 0) {
+ return NewBigZ(&big.Int{})
+ }
+
+ if z1.infinity != 0 || z2.infinity != 0 {
+ return Z{infinity: int8(z1.Sign() * z2.Sign())}
+ }
+
+ n := &big.Int{}
+ n.Mul(z1.integer, z2.integer)
+ return NewBigZ(n)
+}
+
+func (z1 Z) Negate() Z {
+ if z1.infinity == 1 {
+ return NInfinity
+ }
+ if z1.infinity == -1 {
+ return PInfinity
+ }
+ n := &big.Int{}
+ n.Neg(z1.integer)
+ return NewBigZ(n)
+}
+
+func (z1 Z) Sign() int {
+ if z1.infinity != 0 {
+ return int(z1.infinity)
+ }
+ return z1.integer.Sign()
+}
+
+func (z1 Z) String() string {
+ if z1 == NInfinity {
+ return "-∞"
+ }
+ if z1 == PInfinity {
+ return "∞"
+ }
+ return fmt.Sprintf("%d", z1.integer)
+}
+
+// Cmp compares z1 and z2 in the extended integers, returning -1, 0 or
+// +1. Two like-signed infinities compare equal; otherwise +∞ is greater
+// than everything and -∞ less than everything.
+func (z1 Z) Cmp(z2 Z) int {
+	if z1.infinity == z2.infinity && z1.infinity != 0 {
+		return 0
+	}
+	if z1 == PInfinity {
+		return 1
+	}
+	if z1 == NInfinity {
+		return -1
+	}
+	if z2 == NInfinity {
+		return 1
+	}
+	if z2 == PInfinity {
+		return -1
+	}
+	// Both finite: defer to big.Int comparison.
+	return z1.integer.Cmp(z2.integer)
+}
+
+func MaxZ(zs ...Z) Z {
+ if len(zs) == 0 {
+ panic("Max called with no arguments")
+ }
+ if len(zs) == 1 {
+ return zs[0]
+ }
+ ret := zs[0]
+ for _, z := range zs[1:] {
+ if z.Cmp(ret) == 1 {
+ ret = z
+ }
+ }
+ return ret
+}
+
+func MinZ(zs ...Z) Z {
+ if len(zs) == 0 {
+ panic("Min called with no arguments")
+ }
+ if len(zs) == 1 {
+ return zs[0]
+ }
+ ret := zs[0]
+ for _, z := range zs[1:] {
+ if z.Cmp(ret) == -1 {
+ ret = z
+ }
+ }
+ return ret
+}
+
+var NInfinity = Z{infinity: -1}
+var PInfinity = Z{infinity: 1}
+var EmptyIntInterval = IntInterval{true, PInfinity, NInfinity}
+
+func InfinityFor(v ssa.Value) IntInterval {
+ if b, ok := v.Type().Underlying().(*types.Basic); ok {
+ if (b.Info() & types.IsUnsigned) != 0 {
+ return NewIntInterval(NewZ(0), PInfinity)
+ }
+ }
+ return NewIntInterval(NInfinity, PInfinity)
+}
+
+type IntInterval struct {
+ known bool
+ Lower Z
+ Upper Z
+}
+
+func NewIntInterval(l, u Z) IntInterval {
+ if u.Cmp(l) == -1 {
+ return EmptyIntInterval
+ }
+ return IntInterval{known: true, Lower: l, Upper: u}
+}
+
+func (i IntInterval) IsKnown() bool {
+ return i.known
+}
+
+func (i IntInterval) Empty() bool {
+ return i.Lower == PInfinity && i.Upper == NInfinity
+}
+
+func (i IntInterval) IsMaxRange() bool {
+ return i.Lower == NInfinity && i.Upper == PInfinity
+}
+
+func (i1 IntInterval) Intersection(i2 IntInterval) IntInterval {
+ if !i1.IsKnown() {
+ return i2
+ }
+ if !i2.IsKnown() {
+ return i1
+ }
+ if i1.Empty() || i2.Empty() {
+ return EmptyIntInterval
+ }
+ i3 := NewIntInterval(MaxZ(i1.Lower, i2.Lower), MinZ(i1.Upper, i2.Upper))
+ if i3.Lower.Cmp(i3.Upper) == 1 {
+ return EmptyIntInterval
+ }
+ return i3
+}
+
+func (i1 IntInterval) Union(other Range) Range {
+ i2, ok := other.(IntInterval)
+ if !ok {
+ i2 = EmptyIntInterval
+ }
+ if i1.Empty() || !i1.IsKnown() {
+ return i2
+ }
+ if i2.Empty() || !i2.IsKnown() {
+ return i1
+ }
+ return NewIntInterval(MinZ(i1.Lower, i2.Lower), MaxZ(i1.Upper, i2.Upper))
+}
+
+func (i1 IntInterval) Add(i2 IntInterval) IntInterval {
+ if i1.Empty() || i2.Empty() {
+ return EmptyIntInterval
+ }
+ l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper
+ return NewIntInterval(l1.Add(l2), u1.Add(u2))
+}
+
+func (i1 IntInterval) Sub(i2 IntInterval) IntInterval {
+ if i1.Empty() || i2.Empty() {
+ return EmptyIntInterval
+ }
+ l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper
+ return NewIntInterval(l1.Sub(u2), u1.Sub(l2))
+}
+
+func (i1 IntInterval) Mul(i2 IntInterval) IntInterval {
+ if i1.Empty() || i2.Empty() {
+ return EmptyIntInterval
+ }
+ x1, x2 := i1.Lower, i1.Upper
+ y1, y2 := i2.Lower, i2.Upper
+ return NewIntInterval(
+ MinZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)),
+ MaxZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)),
+ )
+}
+
+func (i1 IntInterval) String() string {
+ if !i1.IsKnown() {
+ return "[⊥, ⊥]"
+ }
+ if i1.Empty() {
+ return "{}"
+ }
+ return fmt.Sprintf("[%s, %s]", i1.Lower, i1.Upper)
+}
+
+type IntArithmeticConstraint struct {
+ aConstraint
+ A ssa.Value
+ B ssa.Value
+ Op token.Token
+ Fn func(IntInterval, IntInterval) IntInterval
+}
+
+type IntAddConstraint struct{ *IntArithmeticConstraint }
+type IntSubConstraint struct{ *IntArithmeticConstraint }
+type IntMulConstraint struct{ *IntArithmeticConstraint }
+
+type IntConversionConstraint struct {
+ aConstraint
+ X ssa.Value
+}
+
+type IntIntersectionConstraint struct {
+ aConstraint
+ ranges Ranges
+ A ssa.Value
+ B ssa.Value
+ Op token.Token
+ I IntInterval
+ resolved bool
+}
+
+type IntIntervalConstraint struct {
+ aConstraint
+ I IntInterval
+}
+
+func NewIntArithmeticConstraint(a, b, y ssa.Value, op token.Token, fn func(IntInterval, IntInterval) IntInterval) *IntArithmeticConstraint {
+ return &IntArithmeticConstraint{NewConstraint(y), a, b, op, fn}
+}
+func NewIntAddConstraint(a, b, y ssa.Value) Constraint {
+ return &IntAddConstraint{NewIntArithmeticConstraint(a, b, y, token.ADD, IntInterval.Add)}
+}
+func NewIntSubConstraint(a, b, y ssa.Value) Constraint {
+ return &IntSubConstraint{NewIntArithmeticConstraint(a, b, y, token.SUB, IntInterval.Sub)}
+}
+func NewIntMulConstraint(a, b, y ssa.Value) Constraint {
+ return &IntMulConstraint{NewIntArithmeticConstraint(a, b, y, token.MUL, IntInterval.Mul)}
+}
+func NewIntConversionConstraint(x, y ssa.Value) Constraint {
+ return &IntConversionConstraint{NewConstraint(y), x}
+}
+func NewIntIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint {
+ return &IntIntersectionConstraint{
+ aConstraint: NewConstraint(y),
+ ranges: ranges,
+ A: a,
+ B: b,
+ Op: op,
+ }
+}
+func NewIntIntervalConstraint(i IntInterval, y ssa.Value) Constraint {
+ return &IntIntervalConstraint{NewConstraint(y), i}
+}
+
+func (c *IntArithmeticConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} }
+func (c *IntConversionConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+func (c *IntIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.A} }
+func (s *IntIntervalConstraint) Operands() []ssa.Value { return nil }
+
+func (c *IntArithmeticConstraint) String() string {
+ return fmt.Sprintf("%s = %s %s %s", c.Y().Name(), c.A.Name(), c.Op, c.B.Name())
+}
+func (c *IntConversionConstraint) String() string {
+ return fmt.Sprintf("%s = %s(%s)", c.Y().Name(), c.Y().Type(), c.X.Name())
+}
+func (c *IntIntersectionConstraint) String() string {
+ return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch)
+}
+func (c *IntIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) }
+
+func (c *IntArithmeticConstraint) Eval(g *Graph) Range {
+ i1, i2 := g.Range(c.A).(IntInterval), g.Range(c.B).(IntInterval)
+ if !i1.IsKnown() || !i2.IsKnown() {
+ return IntInterval{}
+ }
+ return c.Fn(i1, i2)
+}
+// Eval computes the range of an integer conversion Y = T(X), clamping
+// the source range to the representable values of the narrower of the
+// two types where it can do so soundly; otherwise it falls back to the
+// source range unchanged.
+func (c *IntConversionConstraint) Eval(g *Graph) Range {
+	s := &types.StdSizes{
+		// XXX is it okay to assume the largest word size, or do we
+		// need to be platform specific?
+		WordSize: 8,
+		MaxAlign: 1,
+	}
+	fromI := g.Range(c.X).(IntInterval)
+	toI := g.Range(c.Y()).(IntInterval)
+	fromT := c.X.Type().Underlying().(*types.Basic)
+	toT := c.Y().Type().Underlying().(*types.Basic)
+	fromB := s.Sizeof(c.X.Type())
+	toB := s.Sizeof(c.Y().Type())
+
+	// If either side is unknown, the other side is the best estimate.
+	if !fromI.IsKnown() {
+		return toI
+	}
+	if !toI.IsKnown() {
+		return fromI
+	}
+
+	// uint<N> -> sint/uint<M>, M > N: [max(0, l1), min(2**N-1, u2)]
+	if (fromT.Info()&types.IsUnsigned != 0) &&
+		toB > fromB {
+
+		// n = 2**N - 1, the maximum value of the source type.
+		n := big.NewInt(1)
+		n.Lsh(n, uint(fromB*8))
+		n.Sub(n, big.NewInt(1))
+		return NewIntInterval(
+			MaxZ(NewZ(0), fromI.Lower),
+			MinZ(NewBigZ(n), toI.Upper),
+		)
+	}
+
+	// sint<N> -> sint<M>, M > N; [max(-∞, l1), min(2**N-1, u2)]
+	if (fromT.Info()&types.IsUnsigned == 0) &&
+		(toT.Info()&types.IsUnsigned == 0) &&
+		toB > fromB {
+
+		n := big.NewInt(1)
+		n.Lsh(n, uint(fromB*8))
+		n.Sub(n, big.NewInt(1))
+		return NewIntInterval(
+			MaxZ(NInfinity, fromI.Lower),
+			MinZ(NewBigZ(n), toI.Upper),
+		)
+	}
+
+	return fromI
+}
+func (c *IntIntersectionConstraint) Eval(g *Graph) Range {
+ xi := g.Range(c.A).(IntInterval)
+ if !xi.IsKnown() {
+ return c.I
+ }
+ return xi.Intersection(c.I)
+}
+func (c *IntIntervalConstraint) Eval(*Graph) Range { return c.I }
+
+func (c *IntIntersectionConstraint) Futures() []ssa.Value {
+ return []ssa.Value{c.B}
+}
+
+func (c *IntIntersectionConstraint) Resolve() {
+ r, ok := c.ranges[c.B].(IntInterval)
+ if !ok {
+ c.I = InfinityFor(c.Y())
+ return
+ }
+
+ switch c.Op {
+ case token.EQL:
+ c.I = r
+ case token.GTR:
+ c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity)
+ case token.GEQ:
+ c.I = NewIntInterval(r.Lower, PInfinity)
+ case token.LSS:
+ // TODO(dh): do we need 0 instead of NInfinity for uints?
+ c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1)))
+ case token.LEQ:
+ c.I = NewIntInterval(NInfinity, r.Upper)
+ case token.NEQ:
+ c.I = InfinityFor(c.Y())
+ default:
+ panic("unsupported op " + c.Op.String())
+ }
+}
+
+func (c *IntIntersectionConstraint) IsKnown() bool {
+ return c.I.IsKnown()
+}
+
+func (c *IntIntersectionConstraint) MarkUnresolved() {
+ c.resolved = false
+}
+
+func (c *IntIntersectionConstraint) MarkResolved() {
+ c.resolved = true
+}
+
+func (c *IntIntersectionConstraint) IsResolved() bool {
+ return c.resolved
+}
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go b/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go
new file mode 100644
index 0000000..40658dd
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go
@@ -0,0 +1,273 @@
+package vrp
+
+// TODO(dh): most of the constraints have implementations identical to
+// that of strings. Consider reusing them.
+
+import (
+ "fmt"
+ "go/types"
+
+ "honnef.co/go/tools/ssa"
+)
+
+type SliceInterval struct {
+ Length IntInterval
+}
+
+func (s SliceInterval) Union(other Range) Range {
+ i, ok := other.(SliceInterval)
+ if !ok {
+ i = SliceInterval{EmptyIntInterval}
+ }
+ if s.Length.Empty() || !s.Length.IsKnown() {
+ return i
+ }
+ if i.Length.Empty() || !i.Length.IsKnown() {
+ return s
+ }
+ return SliceInterval{
+ Length: s.Length.Union(i.Length).(IntInterval),
+ }
+}
+func (s SliceInterval) String() string { return s.Length.String() }
+func (s SliceInterval) IsKnown() bool { return s.Length.IsKnown() }
+
+type SliceAppendConstraint struct {
+ aConstraint
+ A ssa.Value
+ B ssa.Value
+}
+
+type SliceSliceConstraint struct {
+ aConstraint
+ X ssa.Value
+ Lower ssa.Value
+ Upper ssa.Value
+}
+
+type ArraySliceConstraint struct {
+ aConstraint
+ X ssa.Value
+ Lower ssa.Value
+ Upper ssa.Value
+}
+
+type SliceIntersectionConstraint struct {
+ aConstraint
+ X ssa.Value
+ I IntInterval
+}
+
+type SliceLengthConstraint struct {
+ aConstraint
+ X ssa.Value
+}
+
+type MakeSliceConstraint struct {
+ aConstraint
+ Size ssa.Value
+}
+
+type SliceIntervalConstraint struct {
+ aConstraint
+ I IntInterval
+}
+
+func NewSliceAppendConstraint(a, b, y ssa.Value) Constraint {
+ return &SliceAppendConstraint{NewConstraint(y), a, b}
+}
+func NewSliceSliceConstraint(x, lower, upper, y ssa.Value) Constraint {
+ return &SliceSliceConstraint{NewConstraint(y), x, lower, upper}
+}
+func NewArraySliceConstraint(x, lower, upper, y ssa.Value) Constraint {
+ return &ArraySliceConstraint{NewConstraint(y), x, lower, upper}
+}
+func NewSliceIntersectionConstraint(x ssa.Value, i IntInterval, y ssa.Value) Constraint {
+ return &SliceIntersectionConstraint{NewConstraint(y), x, i}
+}
+func NewSliceLengthConstraint(x, y ssa.Value) Constraint {
+ return &SliceLengthConstraint{NewConstraint(y), x}
+}
+func NewMakeSliceConstraint(size, y ssa.Value) Constraint {
+ return &MakeSliceConstraint{NewConstraint(y), size}
+}
+func NewSliceIntervalConstraint(i IntInterval, y ssa.Value) Constraint {
+ return &SliceIntervalConstraint{NewConstraint(y), i}
+}
+
+func (c *SliceAppendConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} }
+func (c *SliceSliceConstraint) Operands() []ssa.Value {
+ ops := []ssa.Value{c.X}
+ if c.Lower != nil {
+ ops = append(ops, c.Lower)
+ }
+ if c.Upper != nil {
+ ops = append(ops, c.Upper)
+ }
+ return ops
+}
+func (c *ArraySliceConstraint) Operands() []ssa.Value {
+ ops := []ssa.Value{c.X}
+ if c.Lower != nil {
+ ops = append(ops, c.Lower)
+ }
+ if c.Upper != nil {
+ ops = append(ops, c.Upper)
+ }
+ return ops
+}
+func (c *SliceIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+func (c *SliceLengthConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+func (c *MakeSliceConstraint) Operands() []ssa.Value { return []ssa.Value{c.Size} }
+func (s *SliceIntervalConstraint) Operands() []ssa.Value { return nil }
+
+func (c *SliceAppendConstraint) String() string {
+ return fmt.Sprintf("%s = append(%s, %s)", c.Y().Name(), c.A.Name(), c.B.Name())
+}
+func (c *SliceSliceConstraint) String() string {
+ var lname, uname string
+ if c.Lower != nil {
+ lname = c.Lower.Name()
+ }
+ if c.Upper != nil {
+ uname = c.Upper.Name()
+ }
+ return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname)
+}
+func (c *ArraySliceConstraint) String() string {
+ var lname, uname string
+ if c.Lower != nil {
+ lname = c.Lower.Name()
+ }
+ if c.Upper != nil {
+ uname = c.Upper.Name()
+ }
+ return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname)
+}
+func (c *SliceIntersectionConstraint) String() string {
+ return fmt.Sprintf("%s = %s.%t ⊓ %s", c.Y().Name(), c.X.Name(), c.Y().(*ssa.Sigma).Branch, c.I)
+}
+func (c *SliceLengthConstraint) String() string {
+ return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name())
+}
+func (c *MakeSliceConstraint) String() string {
+ return fmt.Sprintf("%s = make(slice, %s)", c.Y().Name(), c.Size.Name())
+}
+func (c *SliceIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) }
+
+func (c *SliceAppendConstraint) Eval(g *Graph) Range {
+ l1 := g.Range(c.A).(SliceInterval).Length
+ var l2 IntInterval
+ switch r := g.Range(c.B).(type) {
+ case SliceInterval:
+ l2 = r.Length
+ case StringInterval:
+ l2 = r.Length
+ default:
+ return SliceInterval{}
+ }
+ if !l1.IsKnown() || !l2.IsKnown() {
+ return SliceInterval{}
+ }
+ return SliceInterval{
+ Length: l1.Add(l2),
+ }
+}
+// Eval computes the length range of a slice expression x[lower:upper].
+// A missing lower bound defaults to [0, 0]; a missing upper bound
+// defaults to len(x)'s range. The candidate lengths are all four
+// upper-lower bound differences, truncated at zero.
+func (c *SliceSliceConstraint) Eval(g *Graph) Range {
+	lr := NewIntInterval(NewZ(0), NewZ(0))
+	if c.Lower != nil {
+		lr = g.Range(c.Lower).(IntInterval)
+	}
+	ur := g.Range(c.X).(SliceInterval).Length
+	if c.Upper != nil {
+		ur = g.Range(c.Upper).(IntInterval)
+	}
+	if !lr.IsKnown() || !ur.IsKnown() {
+		return SliceInterval{}
+	}
+
+	ls := []Z{
+		ur.Lower.Sub(lr.Lower),
+		ur.Upper.Sub(lr.Lower),
+		ur.Lower.Sub(lr.Upper),
+		ur.Upper.Sub(lr.Upper),
+	}
+	// TODO(dh): if we don't truncate lengths to 0 we might be able to
+	// easily detect slices with high < low. we'd need to treat -∞
+	// specially, though.
+	for i, l := range ls {
+		if l.Sign() == -1 {
+			ls[i] = NewZ(0)
+		}
+	}
+
+	return SliceInterval{
+		Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)),
+	}
+}
+func (c *ArraySliceConstraint) Eval(g *Graph) Range {
+ lr := NewIntInterval(NewZ(0), NewZ(0))
+ if c.Lower != nil {
+ lr = g.Range(c.Lower).(IntInterval)
+ }
+ var l int64
+ switch typ := c.X.Type().(type) {
+ case *types.Array:
+ l = typ.Len()
+ case *types.Pointer:
+ l = typ.Elem().(*types.Array).Len()
+ }
+ ur := NewIntInterval(NewZ(l), NewZ(l))
+ if c.Upper != nil {
+ ur = g.Range(c.Upper).(IntInterval)
+ }
+ if !lr.IsKnown() || !ur.IsKnown() {
+ return SliceInterval{}
+ }
+
+ ls := []Z{
+ ur.Lower.Sub(lr.Lower),
+ ur.Upper.Sub(lr.Lower),
+ ur.Lower.Sub(lr.Upper),
+ ur.Upper.Sub(lr.Upper),
+ }
+ // TODO(dh): if we don't truncate lengths to 0 we might be able to
+ // easily detect slices with high < low. we'd need to treat -∞
+ // specially, though.
+ for i, l := range ls {
+ if l.Sign() == -1 {
+ ls[i] = NewZ(0)
+ }
+ }
+
+ return SliceInterval{
+ Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)),
+ }
+}
+func (c *SliceIntersectionConstraint) Eval(g *Graph) Range {
+ xi := g.Range(c.X).(SliceInterval)
+ if !xi.IsKnown() {
+ return c.I
+ }
+ return SliceInterval{
+ Length: xi.Length.Intersection(c.I),
+ }
+}
+func (c *SliceLengthConstraint) Eval(g *Graph) Range {
+ i := g.Range(c.X).(SliceInterval).Length
+ if !i.IsKnown() {
+ return NewIntInterval(NewZ(0), PInfinity)
+ }
+ return i
+}
+func (c *MakeSliceConstraint) Eval(g *Graph) Range {
+ i, ok := g.Range(c.Size).(IntInterval)
+ if !ok {
+ return SliceInterval{NewIntInterval(NewZ(0), PInfinity)}
+ }
+ if i.Lower.Sign() == -1 {
+ i.Lower = NewZ(0)
+ }
+ return SliceInterval{i}
+}
+func (c *SliceIntervalConstraint) Eval(*Graph) Range { return SliceInterval{c.I} }
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/string.go b/vendor/honnef.co/go/tools/staticcheck/vrp/string.go
new file mode 100644
index 0000000..e05877f
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/string.go
@@ -0,0 +1,258 @@
+package vrp
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+
+ "honnef.co/go/tools/ssa"
+)
+
+type StringInterval struct {
+ Length IntInterval
+}
+
+func (s StringInterval) Union(other Range) Range {
+ i, ok := other.(StringInterval)
+ if !ok {
+ i = StringInterval{EmptyIntInterval}
+ }
+ if s.Length.Empty() || !s.Length.IsKnown() {
+ return i
+ }
+ if i.Length.Empty() || !i.Length.IsKnown() {
+ return s
+ }
+ return StringInterval{
+ Length: s.Length.Union(i.Length).(IntInterval),
+ }
+}
+
+func (s StringInterval) String() string {
+ return s.Length.String()
+}
+
+func (s StringInterval) IsKnown() bool {
+ return s.Length.IsKnown()
+}
+
+type StringSliceConstraint struct {
+ aConstraint
+ X ssa.Value
+ Lower ssa.Value
+ Upper ssa.Value
+}
+
+type StringIntersectionConstraint struct {
+ aConstraint
+ ranges Ranges
+ A ssa.Value
+ B ssa.Value
+ Op token.Token
+ I IntInterval
+ resolved bool
+}
+
+type StringConcatConstraint struct {
+ aConstraint
+ A ssa.Value
+ B ssa.Value
+}
+
+type StringLengthConstraint struct {
+ aConstraint
+ X ssa.Value
+}
+
+type StringIntervalConstraint struct {
+ aConstraint
+ I IntInterval
+}
+
+func NewStringSliceConstraint(x, lower, upper, y ssa.Value) Constraint {
+ return &StringSliceConstraint{NewConstraint(y), x, lower, upper}
+}
+func NewStringIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint {
+ return &StringIntersectionConstraint{
+ aConstraint: NewConstraint(y),
+ ranges: ranges,
+ A: a,
+ B: b,
+ Op: op,
+ }
+}
+func NewStringConcatConstraint(a, b, y ssa.Value) Constraint {
+ return &StringConcatConstraint{NewConstraint(y), a, b}
+}
+func NewStringLengthConstraint(x ssa.Value, y ssa.Value) Constraint {
+ return &StringLengthConstraint{NewConstraint(y), x}
+}
+func NewStringIntervalConstraint(i IntInterval, y ssa.Value) Constraint {
+ return &StringIntervalConstraint{NewConstraint(y), i}
+}
+
+func (c *StringSliceConstraint) Operands() []ssa.Value {
+ vs := []ssa.Value{c.X}
+ if c.Lower != nil {
+ vs = append(vs, c.Lower)
+ }
+ if c.Upper != nil {
+ vs = append(vs, c.Upper)
+ }
+ return vs
+}
+func (c *StringIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.A} }
+func (c StringConcatConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} }
+func (c *StringLengthConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+func (s *StringIntervalConstraint) Operands() []ssa.Value { return nil }
+
+func (c *StringSliceConstraint) String() string {
+ var lname, uname string
+ if c.Lower != nil {
+ lname = c.Lower.Name()
+ }
+ if c.Upper != nil {
+ uname = c.Upper.Name()
+ }
+ return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname)
+}
+func (c *StringIntersectionConstraint) String() string {
+ return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch)
+}
+func (c StringConcatConstraint) String() string {
+ return fmt.Sprintf("%s = %s + %s", c.Y().Name(), c.A.Name(), c.B.Name())
+}
+func (c *StringLengthConstraint) String() string {
+ return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name())
+}
+func (c *StringIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) }
+
+func (c *StringSliceConstraint) Eval(g *Graph) Range {
+ lr := NewIntInterval(NewZ(0), NewZ(0))
+ if c.Lower != nil {
+ lr = g.Range(c.Lower).(IntInterval)
+ }
+ ur := g.Range(c.X).(StringInterval).Length
+ if c.Upper != nil {
+ ur = g.Range(c.Upper).(IntInterval)
+ }
+ if !lr.IsKnown() || !ur.IsKnown() {
+ return StringInterval{}
+ }
+
+ ls := []Z{
+ ur.Lower.Sub(lr.Lower),
+ ur.Upper.Sub(lr.Lower),
+ ur.Lower.Sub(lr.Upper),
+ ur.Upper.Sub(lr.Upper),
+ }
+ // TODO(dh): if we don't truncate lengths to 0 we might be able to
+ // easily detect slices with high < low. we'd need to treat -∞
+ // specially, though.
+ for i, l := range ls {
+ if l.Sign() == -1 {
+ ls[i] = NewZ(0)
+ }
+ }
+
+ return StringInterval{
+ Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)),
+ }
+}
+func (c *StringIntersectionConstraint) Eval(g *Graph) Range {
+ var l IntInterval
+ switch r := g.Range(c.A).(type) {
+ case StringInterval:
+ l = r.Length
+ case IntInterval:
+ l = r
+ }
+
+ if !l.IsKnown() {
+ return StringInterval{c.I}
+ }
+ return StringInterval{
+ Length: l.Intersection(c.I),
+ }
+}
+func (c StringConcatConstraint) Eval(g *Graph) Range {
+ i1, i2 := g.Range(c.A).(StringInterval), g.Range(c.B).(StringInterval)
+ if !i1.Length.IsKnown() || !i2.Length.IsKnown() {
+ return StringInterval{}
+ }
+ return StringInterval{
+ Length: i1.Length.Add(i2.Length),
+ }
+}
+func (c *StringLengthConstraint) Eval(g *Graph) Range {
+ i := g.Range(c.X).(StringInterval).Length
+ if !i.IsKnown() {
+ return NewIntInterval(NewZ(0), PInfinity)
+ }
+ return i
+}
+func (c *StringIntervalConstraint) Eval(*Graph) Range { return StringInterval{c.I} }
+
+func (c *StringIntersectionConstraint) Futures() []ssa.Value {
+ return []ssa.Value{c.B}
+}
+
+// Resolve narrows c.I from the now-known range of the comparison
+// operand c.B. Two cases: A is a string (a string/string comparison,
+// which constrains A's length via B's length range), or A is an
+// integer (a length/length comparison constrained directly by B's
+// interval). Unknown operand ranges fall back to conservative bounds.
+func (c *StringIntersectionConstraint) Resolve() {
+	if (c.A.Type().Underlying().(*types.Basic).Info() & types.IsString) != 0 {
+		// comparing two strings
+		r, ok := c.ranges[c.B].(StringInterval)
+		if !ok {
+			c.I = NewIntInterval(NewZ(0), PInfinity)
+			return
+		}
+		switch c.Op {
+		case token.EQL:
+			c.I = r.Length
+		case token.GTR, token.GEQ:
+			// s1 > s2 implies len(s1) can be anything >= len(s2)'s
+			// lower bound.
+			c.I = NewIntInterval(r.Length.Lower, PInfinity)
+		case token.LSS, token.LEQ:
+			c.I = NewIntInterval(NewZ(0), r.Length.Upper)
+		case token.NEQ:
+			// != carries no length information.
+		default:
+			panic("unsupported op " + c.Op.String())
+		}
+	} else {
+		r, ok := c.ranges[c.B].(IntInterval)
+		if !ok {
+			c.I = NewIntInterval(NewZ(0), PInfinity)
+			return
+		}
+		// comparing two lengths
+		switch c.Op {
+		case token.EQL:
+			c.I = r
+		case token.GTR:
+			c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity)
+		case token.GEQ:
+			c.I = NewIntInterval(r.Lower, PInfinity)
+		case token.LSS:
+			c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1)))
+		case token.LEQ:
+			c.I = NewIntInterval(NInfinity, r.Upper)
+		case token.NEQ:
+		default:
+			panic("unsupported op " + c.Op.String())
+		}
+	}
+}
+
+func (c *StringIntersectionConstraint) IsKnown() bool {
+ return c.I.IsKnown()
+}
+
+func (c *StringIntersectionConstraint) MarkUnresolved() {
+ c.resolved = false
+}
+
+func (c *StringIntersectionConstraint) MarkResolved() {
+ c.resolved = true
+}
+
+func (c *StringIntersectionConstraint) IsResolved() bool {
+ return c.resolved
+}
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go b/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go
new file mode 100644
index 0000000..cb17f04
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go
@@ -0,0 +1,1049 @@
+package vrp
+
+// TODO(dh) widening and narrowing have a lot of code in common. Make
+// it reusable.
+
+import (
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "math/big"
+ "sort"
+ "strings"
+
+ "honnef.co/go/tools/ssa"
+)
+
// Future is a constraint whose value depends on values that may not
// have been computed yet; it is resolved lazily via Resolve once its
// dependencies are available.
type Future interface {
	Constraint
	// Futures returns the values this constraint is waiting on.
	Futures() []ssa.Value
	// Resolve computes the constraint's interval from its dependencies.
	Resolve()
	IsKnown() bool
	MarkUnresolved()
	MarkResolved()
	IsResolved() bool
}

// Range is an abstract value range (integer, string, slice or channel
// interval).
type Range interface {
	Union(other Range) Range
	IsKnown() bool
}

// Constraint relates the ranges of its operand values to the range of
// the single value Y it defines.
type Constraint interface {
	// Y returns the value this constraint defines.
	Y() ssa.Value
	isConstraint()
	String() string
	// Eval computes Y's range from the operands' ranges in the graph.
	Eval(*Graph) Range
	// Operands returns the values this constraint reads.
	Operands() []ssa.Value
}

// aConstraint is the common embedded base of all constraints; it
// stores the defined value y.
type aConstraint struct {
	y ssa.Value
}

// NewConstraint returns a constraint base defining y.
func NewConstraint(y ssa.Value) aConstraint {
	return aConstraint{y}
}

func (aConstraint) isConstraint() {}
// Y returns the value the constraint defines.
func (c aConstraint) Y() ssa.Value { return c.y }
+
// PhiConstraint models an SSA φ-node: Y's range is the union of the
// ranges of all incoming variables.
type PhiConstraint struct {
	aConstraint
	Vars []ssa.Value
}

// NewPhiConstraint returns a φ constraint over the deduplicated set of
// vars, defining y.
func NewPhiConstraint(vars []ssa.Value, y ssa.Value) Constraint {
	// Deduplicate; a φ may carry the same value on several edges.
	uniqm := map[ssa.Value]struct{}{}
	for _, v := range vars {
		uniqm[v] = struct{}{}
	}
	var uniq []ssa.Value
	for v := range uniqm {
		uniq = append(uniq, v)
	}
	return &PhiConstraint{
		aConstraint: NewConstraint(y),
		Vars:        uniq,
	}
}

// Operands returns the φ's incoming variables.
func (c *PhiConstraint) Operands() []ssa.Value {
	return c.Vars
}

// Eval unions the ranges of all incoming variables.
func (c *PhiConstraint) Eval(g *Graph) Range {
	i := Range(nil)
	for _, v := range c.Vars {
		i = g.Range(v).Union(i)
	}
	return i
}

// String renders the constraint in SSA φ notation.
func (c *PhiConstraint) String() string {
	names := make([]string, len(c.Vars))
	for i, v := range c.Vars {
		names[i] = v.Name()
	}
	return fmt.Sprintf("%s = φ(%s)", c.Y().Name(), strings.Join(names, ", "))
}
+
+func isSupportedType(typ types.Type) bool {
+ switch typ := typ.Underlying().(type) {
+ case *types.Basic:
+ switch typ.Kind() {
+ case types.String, types.UntypedString:
+ return true
+ default:
+ if (typ.Info() & types.IsInteger) == 0 {
+ return false
+ }
+ }
+ case *types.Chan:
+ return true
+ case *types.Slice:
+ return true
+ default:
+ return false
+ }
+ return true
+}
+
// ConstantToZ converts a go/constant integer value to an arbitrary
// precision Z by round-tripping through its exact decimal string.
func ConstantToZ(c constant.Value) Z {
	s := constant.ToInt(c).ExactString()
	n := &big.Int{}
	n.SetString(s, 10)
	return NewBigZ(n)
}
+
// sigmaInteger builds the intersection constraint for a σ-node that
// refines an integer value based on the branch condition cond. It
// returns nil for comparison operators it cannot model.
func sigmaInteger(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint {
	op := cond.Op
	// On the false branch the condition is negated.
	if !ins.Branch {
		op = (invertToken(op))
	}

	switch op {
	case token.EQL, token.GTR, token.GEQ, token.LSS, token.LEQ:
	default:
		return nil
	}
	// Normalize so that a is the value being refined (ins.X) and b
	// the bound; flip the operator if the operands were swapped.
	var a, b ssa.Value
	if (*ops[0]) == ins.X {
		a = *ops[0]
		b = *ops[1]
	} else {
		a = *ops[1]
		b = *ops[0]
		op = flipToken(op)
	}
	return NewIntIntersectionConstraint(a, b, op, g.ranges, ins)
}
+
+func sigmaString(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint {
+ op := cond.Op
+ if !ins.Branch {
+ op = (invertToken(op))
+ }
+
+ switch op {
+ case token.EQL, token.GTR, token.GEQ, token.LSS, token.LEQ:
+ default:
+ return nil
+ }
+
+ if ((*ops[0]).Type().Underlying().(*types.Basic).Info() & types.IsString) == 0 {
+ var a, b ssa.Value
+ call, ok := (*ops[0]).(*ssa.Call)
+ if ok && call.Common().Args[0] == ins.X {
+ a = *ops[0]
+ b = *ops[1]
+ } else {
+ a = *ops[1]
+ b = *ops[0]
+ op = flipToken(op)
+ }
+ return NewStringIntersectionConstraint(a, b, op, g.ranges, ins)
+ }
+ var a, b ssa.Value
+ if (*ops[0]) == ins.X {
+ a = *ops[0]
+ b = *ops[1]
+ } else {
+ a = *ops[1]
+ b = *ops[0]
+ op = flipToken(op)
+ }
+ return NewStringIntersectionConstraint(a, b, op, g.ranges, ins)
+}
+
+func sigmaSlice(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint {
+ // TODO(dh) sigmaSlice and sigmaString are a lot alike. Can they
+ // be merged?
+ //
+ // XXX support futures
+
+ op := cond.Op
+ if !ins.Branch {
+ op = (invertToken(op))
+ }
+
+ k, ok := (*ops[1]).(*ssa.Const)
+ // XXX investigate in what cases this wouldn't be a Const
+ //
+ // XXX what if left and right are swapped?
+ if !ok {
+ return nil
+ }
+
+ call, ok := (*ops[0]).(*ssa.Call)
+ if !ok {
+ return nil
+ }
+ builtin, ok := call.Common().Value.(*ssa.Builtin)
+ if !ok {
+ return nil
+ }
+ if builtin.Name() != "len" {
+ return nil
+ }
+ callops := call.Operands(nil)
+
+ v := ConstantToZ(k.Value)
+ c := NewSliceIntersectionConstraint(*callops[1], IntInterval{}, ins).(*SliceIntersectionConstraint)
+ switch op {
+ case token.EQL:
+ c.I = NewIntInterval(v, v)
+ case token.GTR, token.GEQ:
+ off := int64(0)
+ if cond.Op == token.GTR {
+ off = 1
+ }
+ c.I = NewIntInterval(
+ v.Add(NewZ(off)),
+ PInfinity,
+ )
+ case token.LSS, token.LEQ:
+ off := int64(0)
+ if cond.Op == token.LSS {
+ off = -1
+ }
+ c.I = NewIntInterval(
+ NInfinity,
+ v.Add(NewZ(off)),
+ )
+ default:
+ return nil
+ }
+ return c
+}
+
+func BuildGraph(f *ssa.Function) *Graph {
+ g := &Graph{
+ Vertices: map[interface{}]*Vertex{},
+ ranges: Ranges{},
+ }
+
+ var cs []Constraint
+
+ ops := make([]*ssa.Value, 16)
+ seen := map[ssa.Value]bool{}
+ for _, block := range f.Blocks {
+ for _, ins := range block.Instrs {
+ ops = ins.Operands(ops[:0])
+ for _, op := range ops {
+ if c, ok := (*op).(*ssa.Const); ok {
+ if seen[c] {
+ continue
+ }
+ seen[c] = true
+ if c.Value == nil {
+ switch c.Type().Underlying().(type) {
+ case *types.Slice:
+ cs = append(cs, NewSliceIntervalConstraint(NewIntInterval(NewZ(0), NewZ(0)), c))
+ }
+ continue
+ }
+ switch c.Value.Kind() {
+ case constant.Int:
+ v := ConstantToZ(c.Value)
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(v, v), c))
+ case constant.String:
+ s := constant.StringVal(c.Value)
+ n := NewZ(int64(len(s)))
+ cs = append(cs, NewStringIntervalConstraint(NewIntInterval(n, n), c))
+ }
+ }
+ }
+ }
+ }
+ for _, block := range f.Blocks {
+ for _, ins := range block.Instrs {
+ switch ins := ins.(type) {
+ case *ssa.Convert:
+ switch v := ins.Type().Underlying().(type) {
+ case *types.Basic:
+ if (v.Info() & types.IsInteger) == 0 {
+ continue
+ }
+ cs = append(cs, NewIntConversionConstraint(ins.X, ins))
+ }
+ case *ssa.Call:
+ if static := ins.Common().StaticCallee(); static != nil {
+ if fn, ok := static.Object().(*types.Func); ok {
+ switch fn.FullName() {
+ case "bytes.Index", "bytes.IndexAny", "bytes.IndexByte",
+ "bytes.IndexFunc", "bytes.IndexRune", "bytes.LastIndex",
+ "bytes.LastIndexAny", "bytes.LastIndexByte", "bytes.LastIndexFunc",
+ "strings.Index", "strings.IndexAny", "strings.IndexByte",
+ "strings.IndexFunc", "strings.IndexRune", "strings.LastIndex",
+ "strings.LastIndexAny", "strings.LastIndexByte", "strings.LastIndexFunc":
+ // TODO(dh): instead of limiting by +∞,
+ // limit by the upper bound of the passed
+ // string
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(-1), PInfinity), ins))
+ case "bytes.Title", "bytes.ToLower", "bytes.ToTitle", "bytes.ToUpper",
+ "strings.Title", "strings.ToLower", "strings.ToTitle", "strings.ToUpper":
+ cs = append(cs, NewCopyConstraint(ins.Common().Args[0], ins))
+ case "bytes.ToLowerSpecial", "bytes.ToTitleSpecial", "bytes.ToUpperSpecial",
+ "strings.ToLowerSpecial", "strings.ToTitleSpecial", "strings.ToUpperSpecial":
+ cs = append(cs, NewCopyConstraint(ins.Common().Args[1], ins))
+ case "bytes.Compare", "strings.Compare":
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(-1), NewZ(1)), ins))
+ case "bytes.Count", "strings.Count":
+ // TODO(dh): instead of limiting by +∞,
+ // limit by the upper bound of the passed
+ // string.
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(0), PInfinity), ins))
+ case "bytes.Map", "bytes.TrimFunc", "bytes.TrimLeft", "bytes.TrimLeftFunc",
+ "bytes.TrimRight", "bytes.TrimRightFunc", "bytes.TrimSpace",
+ "strings.Map", "strings.TrimFunc", "strings.TrimLeft", "strings.TrimLeftFunc",
+ "strings.TrimRight", "strings.TrimRightFunc", "strings.TrimSpace":
+ // TODO(dh): lower = 0, upper = upper of passed string
+ case "bytes.TrimPrefix", "bytes.TrimSuffix",
+ "strings.TrimPrefix", "strings.TrimSuffix":
+ // TODO(dh) range between "unmodified" and len(cutset) removed
+ case "(*bytes.Buffer).Cap", "(*bytes.Buffer).Len", "(*bytes.Reader).Len", "(*bytes.Reader).Size":
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(0), PInfinity), ins))
+ }
+ }
+ }
+ builtin, ok := ins.Common().Value.(*ssa.Builtin)
+ ops := ins.Operands(nil)
+ if !ok {
+ continue
+ }
+ switch builtin.Name() {
+ case "len":
+ switch op1 := (*ops[1]).Type().Underlying().(type) {
+ case *types.Basic:
+ if op1.Kind() == types.String || op1.Kind() == types.UntypedString {
+ cs = append(cs, NewStringLengthConstraint(*ops[1], ins))
+ }
+ case *types.Slice:
+ cs = append(cs, NewSliceLengthConstraint(*ops[1], ins))
+ }
+
+ case "append":
+ cs = append(cs, NewSliceAppendConstraint(ins.Common().Args[0], ins.Common().Args[1], ins))
+ }
+ case *ssa.BinOp:
+ ops := ins.Operands(nil)
+ basic, ok := (*ops[0]).Type().Underlying().(*types.Basic)
+ if !ok {
+ continue
+ }
+ switch basic.Kind() {
+ case types.Int, types.Int8, types.Int16, types.Int32, types.Int64,
+ types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64, types.UntypedInt:
+ fns := map[token.Token]func(ssa.Value, ssa.Value, ssa.Value) Constraint{
+ token.ADD: NewIntAddConstraint,
+ token.SUB: NewIntSubConstraint,
+ token.MUL: NewIntMulConstraint,
+ // XXX support QUO, REM, SHL, SHR
+ }
+ fn, ok := fns[ins.Op]
+ if ok {
+ cs = append(cs, fn(*ops[0], *ops[1], ins))
+ }
+ case types.String, types.UntypedString:
+ if ins.Op == token.ADD {
+ cs = append(cs, NewStringConcatConstraint(*ops[0], *ops[1], ins))
+ }
+ }
+ case *ssa.Slice:
+ typ := ins.X.Type().Underlying()
+ switch typ := typ.(type) {
+ case *types.Basic:
+ cs = append(cs, NewStringSliceConstraint(ins.X, ins.Low, ins.High, ins))
+ case *types.Slice:
+ cs = append(cs, NewSliceSliceConstraint(ins.X, ins.Low, ins.High, ins))
+ case *types.Array:
+ cs = append(cs, NewArraySliceConstraint(ins.X, ins.Low, ins.High, ins))
+ case *types.Pointer:
+ if _, ok := typ.Elem().(*types.Array); !ok {
+ continue
+ }
+ cs = append(cs, NewArraySliceConstraint(ins.X, ins.Low, ins.High, ins))
+ }
+ case *ssa.Phi:
+ if !isSupportedType(ins.Type()) {
+ continue
+ }
+ ops := ins.Operands(nil)
+ dops := make([]ssa.Value, len(ops))
+ for i, op := range ops {
+ dops[i] = *op
+ }
+ cs = append(cs, NewPhiConstraint(dops, ins))
+ case *ssa.Sigma:
+ pred := ins.Block().Preds[0]
+ instrs := pred.Instrs
+ cond, ok := instrs[len(instrs)-1].(*ssa.If).Cond.(*ssa.BinOp)
+ ops := cond.Operands(nil)
+ if !ok {
+ continue
+ }
+ switch typ := ins.Type().Underlying().(type) {
+ case *types.Basic:
+ var c Constraint
+ switch typ.Kind() {
+ case types.Int, types.Int8, types.Int16, types.Int32, types.Int64,
+ types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64, types.UntypedInt:
+ c = sigmaInteger(g, ins, cond, ops)
+ case types.String, types.UntypedString:
+ c = sigmaString(g, ins, cond, ops)
+ }
+ if c != nil {
+ cs = append(cs, c)
+ }
+ case *types.Slice:
+ c := sigmaSlice(g, ins, cond, ops)
+ if c != nil {
+ cs = append(cs, c)
+ }
+ default:
+ //log.Printf("unsupported sigma type %T", typ) // XXX
+ }
+ case *ssa.MakeChan:
+ cs = append(cs, NewMakeChannelConstraint(ins.Size, ins))
+ case *ssa.MakeSlice:
+ cs = append(cs, NewMakeSliceConstraint(ins.Len, ins))
+ case *ssa.ChangeType:
+ switch ins.X.Type().Underlying().(type) {
+ case *types.Chan:
+ cs = append(cs, NewChannelChangeTypeConstraint(ins.X, ins))
+ }
+ }
+ }
+ }
+
+ for _, c := range cs {
+ if c == nil {
+ panic("nil constraint")
+ }
+ // If V is used in constraint C, then we create an edge V->C
+ for _, op := range c.Operands() {
+ g.AddEdge(op, c, false)
+ }
+ if c, ok := c.(Future); ok {
+ for _, op := range c.Futures() {
+ g.AddEdge(op, c, true)
+ }
+ }
+ // If constraint C defines variable V, then we create an edge
+ // C->V
+ g.AddEdge(c, c.Y(), false)
+ }
+
+ g.FindSCCs()
+ g.sccEdges = make([][]Edge, len(g.SCCs))
+ g.futures = make([][]Future, len(g.SCCs))
+ for _, e := range g.Edges {
+ g.sccEdges[e.From.SCC] = append(g.sccEdges[e.From.SCC], e)
+ if !e.control {
+ continue
+ }
+ if c, ok := e.To.Value.(Future); ok {
+ g.futures[e.From.SCC] = append(g.futures[e.From.SCC], c)
+ }
+ }
+ return g
+}
+
+func (g *Graph) Solve() Ranges {
+ var consts []Z
+ off := NewZ(1)
+ for _, n := range g.Vertices {
+ if c, ok := n.Value.(*ssa.Const); ok {
+ basic, ok := c.Type().Underlying().(*types.Basic)
+ if !ok {
+ continue
+ }
+ if (basic.Info() & types.IsInteger) != 0 {
+ z := ConstantToZ(c.Value)
+ consts = append(consts, z)
+ consts = append(consts, z.Add(off))
+ consts = append(consts, z.Sub(off))
+ }
+ }
+
+ }
+ sort.Sort(Zs(consts))
+
+ for scc, vertices := range g.SCCs {
+ n := 0
+ n = len(vertices)
+ if n == 1 {
+ g.resolveFutures(scc)
+ v := vertices[0]
+ if v, ok := v.Value.(ssa.Value); ok {
+ switch typ := v.Type().Underlying().(type) {
+ case *types.Basic:
+ switch typ.Kind() {
+ case types.String, types.UntypedString:
+ if !g.Range(v).(StringInterval).IsKnown() {
+ g.SetRange(v, StringInterval{NewIntInterval(NewZ(0), PInfinity)})
+ }
+ default:
+ if !g.Range(v).(IntInterval).IsKnown() {
+ g.SetRange(v, InfinityFor(v))
+ }
+ }
+ case *types.Chan:
+ if !g.Range(v).(ChannelInterval).IsKnown() {
+ g.SetRange(v, ChannelInterval{NewIntInterval(NewZ(0), PInfinity)})
+ }
+ case *types.Slice:
+ if !g.Range(v).(SliceInterval).IsKnown() {
+ g.SetRange(v, SliceInterval{NewIntInterval(NewZ(0), PInfinity)})
+ }
+ }
+ }
+ if c, ok := v.Value.(Constraint); ok {
+ g.SetRange(c.Y(), c.Eval(g))
+ }
+ } else {
+ uses := g.uses(scc)
+ entries := g.entries(scc)
+ for len(entries) > 0 {
+ v := entries[len(entries)-1]
+ entries = entries[:len(entries)-1]
+ for _, use := range uses[v] {
+ if g.widen(use, consts) {
+ entries = append(entries, use.Y())
+ }
+ }
+ }
+
+ g.resolveFutures(scc)
+
+ // XXX this seems to be necessary, but shouldn't be.
+ // removing it leads to nil pointer derefs; investigate
+ // where we're not setting values correctly.
+ for _, n := range vertices {
+ if v, ok := n.Value.(ssa.Value); ok {
+ i, ok := g.Range(v).(IntInterval)
+ if !ok {
+ continue
+ }
+ if !i.IsKnown() {
+ g.SetRange(v, InfinityFor(v))
+ }
+ }
+ }
+
+ actives := g.actives(scc)
+ for len(actives) > 0 {
+ v := actives[len(actives)-1]
+ actives = actives[:len(actives)-1]
+ for _, use := range uses[v] {
+ if g.narrow(use) {
+ actives = append(actives, use.Y())
+ }
+ }
+ }
+ }
+ // propagate scc
+ for _, edge := range g.sccEdges[scc] {
+ if edge.control {
+ continue
+ }
+ if edge.From.SCC == edge.To.SCC {
+ continue
+ }
+ if c, ok := edge.To.Value.(Constraint); ok {
+ g.SetRange(c.Y(), c.Eval(g))
+ }
+ if c, ok := edge.To.Value.(Future); ok {
+ if !c.IsKnown() {
+ c.MarkUnresolved()
+ }
+ }
+ }
+ }
+
+ for v, r := range g.ranges {
+ i, ok := r.(IntInterval)
+ if !ok {
+ continue
+ }
+ if (v.Type().Underlying().(*types.Basic).Info() & types.IsUnsigned) == 0 {
+ if i.Upper != PInfinity {
+ s := &types.StdSizes{
+ // XXX is it okay to assume the largest word size, or do we
+ // need to be platform specific?
+ WordSize: 8,
+ MaxAlign: 1,
+ }
+ bits := (s.Sizeof(v.Type()) * 8) - 1
+ n := big.NewInt(1)
+ n = n.Lsh(n, uint(bits))
+ upper, lower := &big.Int{}, &big.Int{}
+ upper.Sub(n, big.NewInt(1))
+ lower.Neg(n)
+
+ if i.Upper.Cmp(NewBigZ(upper)) == 1 {
+ i = NewIntInterval(NInfinity, PInfinity)
+ } else if i.Lower.Cmp(NewBigZ(lower)) == -1 {
+ i = NewIntInterval(NInfinity, PInfinity)
+ }
+ }
+ }
+
+ g.ranges[v] = i
+ }
+
+ return g.ranges
+}
+
// VertexString returns a printable label for v: a constraint's string
// form or an SSA value's name.
func VertexString(v *Vertex) string {
	switch v := v.Value.(type) {
	case Constraint:
		return v.String()
	case ssa.Value:
		return v.Name()
	case nil:
		return "BUG: nil vertex value"
	default:
		panic(fmt.Sprintf("unexpected type %T", v))
	}
}
+
// Vertex is a node in the constraint graph wrapping either a
// Constraint or an ssa.Value. index, lowlink and stack are Tarjan
// SCC bookkeeping (see FindSCCs).
type Vertex struct {
	Value   interface{} // one of Constraint or ssa.Value
	SCC     int
	index   int
	lowlink int
	stack   bool

	Succs []Edge
}

// Ranges maps SSA values to their computed ranges.
type Ranges map[ssa.Value]Range
+
// Get returns the range recorded for x, falling back to the unknown
// (zero) interval of the kind matching x's type when none has been
// computed yet. A nil x yields a nil Range.
func (r Ranges) Get(x ssa.Value) Range {
	if x == nil {
		return nil
	}
	i, ok := r[x]
	if !ok {
		switch x := x.Type().Underlying().(type) {
		case *types.Basic:
			switch x.Kind() {
			case types.String, types.UntypedString:
				return StringInterval{}
			default:
				return IntInterval{}
			}
		case *types.Chan:
			return ChannelInterval{}
		case *types.Slice:
			return SliceInterval{}
		}
	}
	return i
}
+
// Graph is the value-range-propagation constraint graph of a single
// function: vertices wrap SSA values and constraints, edges connect
// values to the constraints that use them and constraints to the
// values they define.
type Graph struct {
	Vertices map[interface{}]*Vertex
	Edges    []Edge
	SCCs     [][]*Vertex
	ranges   Ranges

	// map SCCs to futures
	futures [][]Future
	// map SCCs to edges
	sccEdges [][]Edge
}
+
// Graphviz renders the constraint graph in dot format: values are
// ovals, constraints boxes, vertices are colored by SCC and control
// (future) edges drawn dashed.
func (g Graph) Graphviz() string {
	var lines []string
	lines = append(lines, "digraph{")
	ids := map[interface{}]int{}
	i := 1
	for _, v := range g.Vertices {
		ids[v] = i
		shape := "box"
		if _, ok := v.Value.(ssa.Value); ok {
			shape = "oval"
		}
		lines = append(lines, fmt.Sprintf(`n%d [shape="%s", label=%q, colorscheme=spectral11, style="filled", fillcolor="%d"]`,
			i, shape, VertexString(v), (v.SCC%11)+1))
		i++
	}
	for _, e := range g.Edges {
		style := "solid"
		if e.control {
			style = "dashed"
		}
		lines = append(lines, fmt.Sprintf(`n%d -> n%d [style="%s"]`, ids[e.From], ids[e.To], style))
	}
	lines = append(lines, "}")
	return strings.Join(lines, "\n")
}
+
// SetRange records r as the current range of x.
func (g *Graph) SetRange(x ssa.Value, r Range) {
	g.ranges[x] = r
}

// Range returns the current range of x (see Ranges.Get for defaults).
func (g *Graph) Range(x ssa.Value) Range {
	return g.ranges.Get(x)
}
+
// widen grows the range of c.Y() when evaluating c exceeds the old
// bounds, jumping to a landmark constant from consts when one bounds
// the new value, or to ±∞ otherwise. It reports whether the range
// changed.
func (g *Graph) widen(c Constraint, consts []Z) bool {
	setRange := func(i Range) {
		g.SetRange(c.Y(), i)
	}
	widenIntInterval := func(oi, ni IntInterval) (IntInterval, bool) {
		if !ni.IsKnown() {
			return oi, false
		}
		nlc := NInfinity
		nuc := PInfinity
		// Pick landmark candidates for the new bounds; consts was
		// sorted by Solve.
		for _, co := range consts {
			if co.Cmp(ni.Lower) <= 0 {
				nlc = co
				break
			}
		}
		for _, co := range consts {
			if co.Cmp(ni.Upper) >= 0 {
				nuc = co
				break
			}
		}

		if !oi.IsKnown() {
			return ni, true
		}
		// Widen only the bounds that actually moved outward.
		if ni.Lower.Cmp(oi.Lower) == -1 && ni.Upper.Cmp(oi.Upper) == 1 {
			return NewIntInterval(nlc, nuc), true
		}
		if ni.Lower.Cmp(oi.Lower) == -1 {
			return NewIntInterval(nlc, oi.Upper), true
		}
		if ni.Upper.Cmp(oi.Upper) == 1 {
			return NewIntInterval(oi.Lower, nuc), true
		}
		return oi, false
	}
	switch oi := g.Range(c.Y()).(type) {
	case IntInterval:
		ni := c.Eval(g).(IntInterval)
		si, changed := widenIntInterval(oi, ni)
		if changed {
			setRange(si)
			return true
		}
		return false
	case StringInterval:
		ni := c.Eval(g).(StringInterval)
		si, changed := widenIntInterval(oi.Length, ni.Length)
		if changed {
			setRange(StringInterval{si})
			return true
		}
		return false
	case SliceInterval:
		ni := c.Eval(g).(SliceInterval)
		si, changed := widenIntInterval(oi.Length, ni.Length)
		if changed {
			setRange(SliceInterval{si})
			return true
		}
		return false
	default:
		return false
	}
}
+
// narrow tightens the range of c.Y() after widening: infinite bounds
// are replaced with evaluated finite ones, and finite bounds are
// pulled inward when evaluation shows they are too loose. It reports
// whether the range changed.
func (g *Graph) narrow(c Constraint) bool {
	narrowIntInterval := func(oi, ni IntInterval) (IntInterval, bool) {
		oLower := oi.Lower
		oUpper := oi.Upper
		nLower := ni.Lower
		nUpper := ni.Upper

		if oLower == NInfinity && nLower != NInfinity {
			return NewIntInterval(nLower, oUpper), true
		}
		if oUpper == PInfinity && nUpper != PInfinity {
			return NewIntInterval(oLower, nUpper), true
		}
		if oLower.Cmp(nLower) == 1 {
			return NewIntInterval(nLower, oUpper), true
		}
		if oUpper.Cmp(nUpper) == -1 {
			return NewIntInterval(oLower, nUpper), true
		}
		return oi, false
	}
	switch oi := g.Range(c.Y()).(type) {
	case IntInterval:
		ni := c.Eval(g).(IntInterval)
		si, changed := narrowIntInterval(oi, ni)
		if changed {
			g.SetRange(c.Y(), si)
			return true
		}
		return false
	case StringInterval:
		ni := c.Eval(g).(StringInterval)
		si, changed := narrowIntInterval(oi.Length, ni.Length)
		if changed {
			g.SetRange(c.Y(), StringInterval{si})
			return true
		}
		return false
	case SliceInterval:
		ni := c.Eval(g).(SliceInterval)
		si, changed := narrowIntInterval(oi.Length, ni.Length)
		if changed {
			g.SetRange(c.Y(), SliceInterval{si})
			return true
		}
		return false
	default:
		return false
	}
}
+
// resolveFutures resolves every future constraint registered for scc.
func (g *Graph) resolveFutures(scc int) {
	for _, c := range g.futures[scc] {
		c.Resolve()
	}
}
+
// entries returns the values in scc whose range is already known;
// they seed the widening worklist. Any unresolved future defining
// such a value is evaluated and marked resolved on the way.
func (g *Graph) entries(scc int) []ssa.Value {
	var entries []ssa.Value
	for _, n := range g.Vertices {
		if n.SCC != scc {
			continue
		}
		if v, ok := n.Value.(ssa.Value); ok {
			// XXX avoid quadratic runtime
			//
			// XXX I cannot think of any code where the future and its
			// variables aren't in the same SCC, in which case this
			// code isn't very useful (the variables won't be resolved
			// yet). Before we have a cross-SCC example, however, we
			// can't really verify that this code is working
			// correctly, or indeed doing anything useful.
			for _, on := range g.Vertices {
				if c, ok := on.Value.(Future); ok {
					if c.Y() == v {
						if !c.IsResolved() {
							g.SetRange(c.Y(), c.Eval(g))
							c.MarkResolved()
						}
						break
					}
				}
			}
			if g.Range(v).IsKnown() {
				entries = append(entries, v)
			}
		}
	}
	return entries
}
+
// uses maps each value in scc to the constraints that read it and
// whose defined value also lies in scc. Control (future) edges are
// excluded.
func (g *Graph) uses(scc int) map[ssa.Value][]Constraint {
	m := map[ssa.Value][]Constraint{}
	for _, e := range g.sccEdges[scc] {
		if e.control {
			continue
		}
		if v, ok := e.From.Value.(ssa.Value); ok {
			c := e.To.Value.(Constraint)
			sink := c.Y()
			if g.Vertices[sink].SCC == scc {
				m[v] = append(m[v], c)
			}
		}
	}
	return m
}
+
// actives returns all non-constant values in scc; they form the
// initial worklist for narrowing.
func (g *Graph) actives(scc int) []ssa.Value {
	var actives []ssa.Value
	for _, n := range g.Vertices {
		if n.SCC != scc {
			continue
		}
		if v, ok := n.Value.(ssa.Value); ok {
			if _, ok := v.(*ssa.Const); !ok {
				actives = append(actives, v)
			}
		}
	}
	return actives
}
+
// AddEdge inserts an edge from from to to, creating vertices on
// demand. ctrl marks control edges, used to connect future
// constraints to the values they wait on.
func (g *Graph) AddEdge(from, to interface{}, ctrl bool) {
	vf, ok := g.Vertices[from]
	if !ok {
		vf = &Vertex{Value: from}
		g.Vertices[from] = vf
	}
	vt, ok := g.Vertices[to]
	if !ok {
		vt = &Vertex{Value: to}
		g.Vertices[to] = vt
	}
	e := Edge{From: vf, To: vt, control: ctrl}
	g.Edges = append(g.Edges, e)
	vf.Succs = append(vf.Succs, e)
}
+
// Edge is a directed edge of the constraint graph; control marks
// future ("control") dependencies.
type Edge struct {
	From, To *Vertex
	control  bool
}

// String renders the edge using the labels of its endpoints.
func (e Edge) String() string {
	return fmt.Sprintf("%s -> %s", VertexString(e.From), VertexString(e.To))
}
+
// FindSCCs computes the strongly connected components of the graph
// with Tarjan's algorithm and stores them in g.SCCs, renumbered so
// that SCC indices increase in topological order.
func (g *Graph) FindSCCs() {
	// use Tarjan to find the SCCs

	index := 1
	var s []*Vertex

	scc := 0
	var strongconnect func(v *Vertex)
	strongconnect = func(v *Vertex) {
		// set the depth index for v to the smallest unused index
		v.index = index
		v.lowlink = index
		index++
		s = append(s, v)
		v.stack = true

		for _, e := range v.Succs {
			w := e.To
			if w.index == 0 {
				// successor w has not yet been visited; recurse on it
				strongconnect(w)
				if w.lowlink < v.lowlink {
					v.lowlink = w.lowlink
				}
			} else if w.stack {
				// successor w is in stack s and hence in the current scc
				if w.index < v.lowlink {
					v.lowlink = w.index
				}
			}
		}

		// If v is a root node, pop the stack down to v to form an SCC.
		if v.lowlink == v.index {
			for {
				w := s[len(s)-1]
				s = s[:len(s)-1]
				w.stack = false
				w.SCC = scc
				if w == v {
					break
				}
			}
			scc++
		}
	}
	for _, v := range g.Vertices {
		if v.index == 0 {
			strongconnect(v)
		}
	}

	// Tarjan emits SCCs in reverse topological order; invert the
	// numbering so lower indices come first topologically.
	g.SCCs = make([][]*Vertex, scc)
	for _, n := range g.Vertices {
		n.SCC = scc - n.SCC - 1
		g.SCCs[n.SCC] = append(g.SCCs[n.SCC], n)
	}
}
+
+func invertToken(tok token.Token) token.Token {
+ switch tok {
+ case token.LSS:
+ return token.GEQ
+ case token.GTR:
+ return token.LEQ
+ case token.EQL:
+ return token.NEQ
+ case token.NEQ:
+ return token.EQL
+ case token.GEQ:
+ return token.LSS
+ case token.LEQ:
+ return token.GTR
+ default:
+ panic(fmt.Sprintf("unsupported token %s", tok))
+ }
+}
+
+func flipToken(tok token.Token) token.Token {
+ switch tok {
+ case token.LSS:
+ return token.GTR
+ case token.GTR:
+ return token.LSS
+ case token.EQL:
+ return token.EQL
+ case token.NEQ:
+ return token.NEQ
+ case token.GEQ:
+ return token.LEQ
+ case token.LEQ:
+ return token.GEQ
+ default:
+ panic(fmt.Sprintf("unsupported token %s", tok))
+ }
+}
+
// CopyConstraint makes Y's range mirror X's range verbatim.
type CopyConstraint struct {
	aConstraint
	X ssa.Value
}

// String renders the constraint.
func (c *CopyConstraint) String() string {
	return fmt.Sprintf("%s = copy(%s)", c.Y().Name(), c.X.Name())
}

// Eval returns X's current range.
func (c *CopyConstraint) Eval(g *Graph) Range {
	return g.Range(c.X)
}

// Operands returns the copied value.
func (c *CopyConstraint) Operands() []ssa.Value {
	return []ssa.Value{c.X}
}
+
+func NewCopyConstraint(x, y ssa.Value) Constraint {
+ return &CopyConstraint{
+ aConstraint: aConstraint{
+ y: y,
+ },
+ X: x,
+ }
+}
diff --git a/vendor/honnef.co/go/tools/unused/unused.go b/vendor/honnef.co/go/tools/unused/unused.go
new file mode 100644
index 0000000..21889e8
--- /dev/null
+++ b/vendor/honnef.co/go/tools/unused/unused.go
@@ -0,0 +1,1064 @@
+package unused // import "honnef.co/go/tools/unused"
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "io"
+ "path/filepath"
+ "strings"
+
+ "honnef.co/go/tools/lint"
+
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/types/typeutil"
+)
+
// NewLintChecker wraps an unused.Checker so it can run as a lint
// checker.
func NewLintChecker(c *Checker) *LintChecker {
	l := &LintChecker{
		c: c,
	}
	return l
}

// LintChecker adapts Checker to the lint checker interface.
type LintChecker struct {
	c *Checker
}

func (*LintChecker) Name() string   { return "unused" }
func (*LintChecker) Prefix() string { return "U" }

// Init implements the lint checker interface; nothing to initialize.
func (l *LintChecker) Init(*lint.Program) {}
// Funcs exposes the single check U1000.
func (l *LintChecker) Funcs() map[string]lint.Func {
	return map[string]lint.Func{
		"U1000": l.Lint,
	}
}
+
+func typString(obj types.Object) string {
+ switch obj := obj.(type) {
+ case *types.Func:
+ return "func"
+ case *types.Var:
+ if obj.IsField() {
+ return "field"
+ }
+ return "var"
+ case *types.Const:
+ return "const"
+ case *types.TypeName:
+ return "type"
+ default:
+ // log.Printf("%T", obj)
+ return "identifier"
+ }
+}
+
// Lint reports every unused object found by the wrapped Checker.
// Method names are qualified with their receiver type, e.g. (*T).M
// or T.M.
func (l *LintChecker) Lint(j *lint.Job) {
	unused := l.c.Check(j.Program.Prog)
	for _, u := range unused {
		name := u.Obj.Name()
		if sig, ok := u.Obj.Type().(*types.Signature); ok && sig.Recv() != nil {
			switch sig.Recv().Type().(type) {
			case *types.Named, *types.Pointer:
				// Render the receiver type without package
				// qualification.
				typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" })
				if len(typ) > 0 && typ[0] == '*' {
					name = fmt.Sprintf("(%s).%s", typ, u.Obj.Name())
				} else if len(typ) > 0 {
					name = fmt.Sprintf("%s.%s", typ, u.Obj.Name())
				}
			}
		}
		j.Errorf(u.Obj, "%s %s is unused", typString(u.Obj), name)
	}
}
+
// graph records which objects use which other objects.
type graph struct {
	roots []*graphNode
	nodes map[interface{}]*graphNode
}

// markUsedBy records that obj is used by usedBy; self-references are
// ignored.
func (g *graph) markUsedBy(obj, usedBy interface{}) {
	objNode := g.getNode(obj)
	usedByNode := g.getNode(usedBy)
	if objNode.obj == usedByNode.obj {
		return
	}
	usedByNode.uses[objNode] = struct{}{}
}

// labelCounter numbers graph nodes for debug output.
var labelCounter = 1
+
// getNode returns the node for obj, creating it on first use.
// Pointer types are unwrapped to their element type first so a type
// and pointers to it share one node.
func (g *graph) getNode(obj interface{}) *graphNode {
	for {
		if pt, ok := obj.(*types.Pointer); ok {
			obj = pt.Elem()
		} else {
			break
		}
	}
	_, ok := g.nodes[obj]
	if !ok {
		g.addObj(obj)
	}

	return g.nodes[obj]
}

// addObj inserts a fresh node for obj. For struct types, the struct
// is additionally marked as used by each of its fields.
func (g *graph) addObj(obj interface{}) {
	if pt, ok := obj.(*types.Pointer); ok {
		obj = pt.Elem()
	}
	node := &graphNode{obj: obj, uses: make(map[*graphNode]struct{}), n: labelCounter}
	g.nodes[obj] = node
	labelCounter++

	if obj, ok := obj.(*types.Struct); ok {
		n := obj.NumFields()
		for i := 0; i < n; i++ {
			field := obj.Field(i)
			g.markUsedBy(obj, field)
		}
	}
}
+
// graphNode is one vertex in the usage graph.
type graphNode struct {
	obj   interface{}
	uses  map[*graphNode]struct{}
	used  bool
	quiet bool // suppress reporting this node even if unused
	n     int  // debug label
}

// CheckMode is a bit set selecting which kinds of objects Checker
// examines.
type CheckMode int

const (
	CheckConstants CheckMode = 1 << iota
	CheckFields
	CheckFunctions
	CheckTypes
	CheckVariables

	// CheckAll enables every check.
	CheckAll = CheckConstants | CheckFields | CheckFunctions | CheckTypes | CheckVariables
)

// Unused describes a single unused object and its source position.
type Unused struct {
	Obj      types.Object
	Position token.Position
}
+
// Checker finds unused code.
type Checker struct {
	Mode               CheckMode
	WholeProgram       bool
	ConsiderReflection bool
	Debug              io.Writer

	graph *graph

	msCache      typeutil.MethodSetCache
	lprog        *loader.Program
	topmostCache map[*types.Scope]*types.Scope
	interfaces   []*types.Interface
}

// NewChecker returns a Checker configured with the given mode.
func NewChecker(mode CheckMode) *Checker {
	return &Checker{
		Mode: mode,
		graph: &graph{
			nodes: make(map[interface{}]*graphNode),
		},
		topmostCache: make(map[*types.Scope]*types.Scope),
	}
}

// The following helpers report whether a given kind of object is
// enabled in the checker's mode.
func (c *Checker) checkConstants() bool { return (c.Mode & CheckConstants) > 0 }
func (c *Checker) checkFields() bool    { return (c.Mode & CheckFields) > 0 }
func (c *Checker) checkFunctions() bool { return (c.Mode & CheckFunctions) > 0 }
func (c *Checker) checkTypes() bool     { return (c.Mode & CheckTypes) > 0 }
func (c *Checker) checkVariables() bool { return (c.Mode & CheckVariables) > 0 }
+
// markFields marks every field of a struct type as used by the type
// itself.
func (c *Checker) markFields(typ types.Type) {
	structType, ok := typ.Underlying().(*types.Struct)
	if !ok {
		return
	}
	n := structType.NumFields()
	for i := 0; i < n; i++ {
		field := structType.Field(i)
		c.graph.markUsedBy(field, typ)
	}
}

// Error aggregates per-package errors.
type Error struct {
	Errors map[string][]error
}

func (e Error) Error() string {
	return fmt.Sprintf("errors in %d packages", len(e.Errors))
}
+
+func (c *Checker) Check(lprog *loader.Program) []Unused {
+ var unused []Unused
+ c.lprog = lprog
+ if c.WholeProgram {
+ c.findExportedInterfaces()
+ }
+ for _, pkg := range c.lprog.InitialPackages() {
+ c.processDefs(pkg)
+ c.processUses(pkg)
+ c.processTypes(pkg)
+ c.processSelections(pkg)
+ c.processAST(pkg)
+ }
+
+ for _, node := range c.graph.nodes {
+ obj, ok := node.obj.(types.Object)
+ if !ok {
+ continue
+ }
+ typNode, ok := c.graph.nodes[obj.Type()]
+ if !ok {
+ continue
+ }
+ node.uses[typNode] = struct{}{}
+ }
+
+ roots := map[*graphNode]struct{}{}
+ for _, root := range c.graph.roots {
+ roots[root] = struct{}{}
+ }
+ markNodesUsed(roots)
+ c.markNodesQuiet()
+
+ if c.Debug != nil {
+ c.printDebugGraph(c.Debug)
+ }
+
+ for _, node := range c.graph.nodes {
+ if node.used || node.quiet {
+ continue
+ }
+ obj, ok := node.obj.(types.Object)
+ if !ok {
+ continue
+ }
+ found := false
+ if !false {
+ for _, pkg := range c.lprog.InitialPackages() {
+ if pkg.Pkg == obj.Pkg() {
+ found = true
+ break
+ }
+ }
+ }
+ if !found {
+ continue
+ }
+
+ pos := c.lprog.Fset.Position(obj.Pos())
+ if pos.Filename == "" || filepath.Base(pos.Filename) == "C" {
+ continue
+ }
+ generated := false
+ for _, file := range c.lprog.Package(obj.Pkg().Path()).Files {
+ if c.lprog.Fset.Position(file.Pos()).Filename != pos.Filename {
+ continue
+ }
+ if len(file.Comments) > 0 {
+ generated = isGenerated(file.Comments[0].Text())
+ }
+ break
+ }
+ if generated {
+ continue
+ }
+ unused = append(unused, Unused{Obj: obj, Position: pos})
+ }
+ return unused
+}
+
+// isNoCopyType reports whether a type represents the NoCopy sentinel
+// type. The NoCopy type is a named struct with no fields and exactly
+// one method `func Lock()` that is empty.
+//
+// FIXME(dh): currently we're not checking that the function body is
+// empty.
+func isNoCopyType(typ types.Type) bool {
+ st, ok := typ.Underlying().(*types.Struct)
+ if !ok {
+ return false
+ }
+ if st.NumFields() != 0 {
+ return false
+ }
+
+ named, ok := typ.(*types.Named)
+ if !ok {
+ return false
+ }
+ if named.NumMethods() != 1 {
+ return false
+ }
+ meth := named.Method(0)
+ if meth.Name() != "Lock" {
+ return false
+ }
+ sig := meth.Type().(*types.Signature)
+ if sig.Params().Len() != 0 || sig.Results().Len() != 0 {
+ return false
+ }
+ return true
+}
+
// useNoCopyFields marks NoCopy sentinel fields of a struct type (and
// their Lock method) as used so they are never reported.
func (c *Checker) useNoCopyFields(typ types.Type) {
	if st, ok := typ.Underlying().(*types.Struct); ok {
		n := st.NumFields()
		for i := 0; i < n; i++ {
			field := st.Field(i)
			if isNoCopyType(field.Type()) {
				c.graph.markUsedBy(field, typ)
				c.graph.markUsedBy(field.Type().(*types.Named).Method(0), field.Type())
			}
		}
	}
}

// useExportedFields marks all exported fields of a struct type as
// used; external code may access them without our knowledge.
func (c *Checker) useExportedFields(typ types.Type) {
	if st, ok := typ.Underlying().(*types.Struct); ok {
		n := st.NumFields()
		for i := 0; i < n; i++ {
			field := st.Field(i)
			if field.Exported() {
				c.graph.markUsedBy(field, typ)
			}
		}
	}
}

// useExportedMethods marks all exported methods of a named type as
// used, and keeps anonymous (embedded) fields that contribute at
// least one exported method.
func (c *Checker) useExportedMethods(typ types.Type) {
	named, ok := typ.(*types.Named)
	if !ok {
		return
	}
	ms := typeutil.IntuitiveMethodSet(named, &c.msCache)
	for i := 0; i < len(ms); i++ {
		meth := ms[i].Obj()
		if meth.Exported() {
			c.graph.markUsedBy(meth, typ)
		}
	}

	st, ok := named.Underlying().(*types.Struct)
	if !ok {
		return
	}
	n := st.NumFields()
	for i := 0; i < n; i++ {
		field := st.Field(i)
		if !field.Anonymous() {
			continue
		}
		// An embedded field providing an exported method must be kept.
		ms := typeutil.IntuitiveMethodSet(field.Type(), &c.msCache)
		for j := 0; j < len(ms); j++ {
			if ms[j].Obj().Exported() {
				c.graph.markUsedBy(field, typ)
				break
			}
		}
	}
}
+
+// processDefs walks every definition in pkg and seeds the usage
+// graph: type names are linked with their underlying types, exported
+// fields/methods are conservatively marked used, blank ("_") objects
+// are quieted, functions are tied to their signatures and result
+// types, and roots (see isRoot) are recorded.
+func (c *Checker) processDefs(pkg *loader.PackageInfo) {
+ for _, obj := range pkg.Defs {
+ if obj == nil {
+ continue
+ }
+ c.graph.getNode(obj)
+
+ if obj, ok := obj.(*types.TypeName); ok {
+ c.graph.markUsedBy(obj.Type().Underlying(), obj.Type())
+ c.graph.markUsedBy(obj.Type(), obj) // TODO is this needed?
+ c.graph.markUsedBy(obj, obj.Type())
+
+ // We mark all exported fields as used. For normal
+ // operation, we have to. The user may use these fields
+ // without us knowing.
+ //
+ // TODO(dh): In whole-program mode, however, we mark them
+ // as used because of reflection (such as JSON
+ // marshaling). Strictly speaking, we would only need to
+ // mark them used if an instance of the type was
+ // accessible via an interface value.
+ if !c.WholeProgram || c.ConsiderReflection {
+ c.useExportedFields(obj.Type())
+ }
+
+ // TODO(dh): Traditionally we have not marked all exported
+ // methods as exported, even though they're strictly
+ // speaking accessible through reflection. We've done that
+ // because using methods just via reflection is rare, and
+ // not worth the false negatives. With the new -reflect
+ // flag, however, we should reconsider that choice.
+ if !c.WholeProgram {
+ c.useExportedMethods(obj.Type())
+ }
+ }
+
+ switch obj := obj.(type) {
+ case *types.Var, *types.Const, *types.Func, *types.TypeName:
+ if obj.Exported() {
+ // Exported variables and constants use their types,
+ // even if there's no expression using them in the
+ // checked program.
+ //
+ // Also operates on funcs and type names, but that's
+ // irrelevant/redundant.
+ c.graph.markUsedBy(obj.Type(), obj)
+ }
+ // Blank identifiers can never be referenced; quiet them
+ // and anchor them to their scope (or to the roots when
+ // declared at package level).
+ if obj.Name() == "_" {
+ node := c.graph.getNode(obj)
+ node.quiet = true
+ scope := c.topmostScope(pkg.Pkg.Scope().Innermost(obj.Pos()), pkg.Pkg)
+ if scope == pkg.Pkg.Scope() {
+ c.graph.roots = append(c.graph.roots, node)
+ } else {
+ c.graph.markUsedBy(obj, scope)
+ }
+ } else {
+ // Variables declared in functions are used. This is
+ // done so that arguments and return parameters are
+ // always marked as used.
+ if _, ok := obj.(*types.Var); ok {
+ if obj.Parent() != obj.Pkg().Scope() && obj.Parent() != nil {
+ c.graph.markUsedBy(obj, c.topmostScope(obj.Parent(), obj.Pkg()))
+ c.graph.markUsedBy(obj.Type(), obj)
+ }
+ }
+ }
+ }
+
+ if fn, ok := obj.(*types.Func); ok {
+ // A function uses its signature
+ c.graph.markUsedBy(fn, fn.Type())
+
+ // A function uses its return types
+ sig := fn.Type().(*types.Signature)
+ res := sig.Results()
+ n := res.Len()
+ for i := 0; i < n; i++ {
+ c.graph.markUsedBy(res.At(i).Type(), fn)
+ }
+ }
+
+ // Objects that own a scope (functions, packages) use the
+ // topmost scope enclosing it.
+ if obj, ok := obj.(interface {
+ Scope() *types.Scope
+ Pkg() *types.Package
+ }); ok {
+ scope := obj.Scope()
+ c.graph.markUsedBy(c.topmostScope(scope, obj.Pkg()), obj)
+ }
+
+ if c.isRoot(obj) {
+ node := c.graph.getNode(obj)
+ c.graph.roots = append(c.graph.roots, node)
+ if obj, ok := obj.(*types.PkgName); ok {
+ scope := obj.Pkg().Scope()
+ c.graph.markUsedBy(scope, obj)
+ }
+ }
+ }
+}
+
+// processUses records, for every identifier referring to an object,
+// that the identifier's topmost enclosing function scope uses the
+// object, and that used variables/constants use their types. Package
+// names are skipped.
+func (c *Checker) processUses(pkg *loader.PackageInfo) {
+ for ident, usedObj := range pkg.Uses {
+ if _, ok := usedObj.(*types.PkgName); ok {
+ continue
+ }
+ pos := ident.Pos()
+ scope := pkg.Pkg.Scope().Innermost(pos)
+ scope = c.topmostScope(scope, pkg.Pkg)
+ if scope != pkg.Pkg.Scope() {
+ c.graph.markUsedBy(usedObj, scope)
+ }
+
+ switch usedObj.(type) {
+ case *types.Var, *types.Const:
+ c.graph.markUsedBy(usedObj.Type(), usedObj)
+ }
+ }
+}
+
+// findExportedInterfaces collects the interfaces that types may be
+// implementing on purpose: the built-in error interface plus every
+// non-empty interface type occurring in the checked packages (all
+// packages in whole-program mode, only the initial ones otherwise).
+//
+// NOTE(review): despite the name, interfaces are not filtered by
+// exportedness — confirm that is intended.
+func (c *Checker) findExportedInterfaces() {
+ c.interfaces = []*types.Interface{types.Universe.Lookup("error").Type().(*types.Named).Underlying().(*types.Interface)}
+ var pkgs []*loader.PackageInfo
+ if c.WholeProgram {
+ for _, pkg := range c.lprog.AllPackages {
+ pkgs = append(pkgs, pkg)
+ }
+ } else {
+ pkgs = c.lprog.InitialPackages()
+ }
+
+ for _, pkg := range pkgs {
+ for _, tv := range pkg.Types {
+ iface, ok := tv.Type.(*types.Interface)
+ if !ok {
+ continue
+ }
+ // Empty interfaces impose no method requirements and are
+ // therefore irrelevant.
+ if iface.NumMethods() == 0 {
+ continue
+ }
+ c.interfaces = append(c.interfaces, iface)
+ }
+ }
+}
+
+// processTypes seeds type-level edges in the usage graph: element
+// types are used by their containers, named types and their
+// underlying types use each other, NoCopy sentinel fields and (in
+// some modes) exported struct fields are marked used. It then
+// assumes every named type may be implementing every known interface
+// and marks the methods satisfying those interfaces as used.
+func (c *Checker) processTypes(pkg *loader.PackageInfo) {
+ named := map[*types.Named]*types.Pointer{}
+ var interfaces []*types.Interface
+ for _, tv := range pkg.Types {
+ // Covers pointers, slices, arrays, channels and maps, all of
+ // which expose Elem().
+ if typ, ok := tv.Type.(interface {
+ Elem() types.Type
+ }); ok {
+ c.graph.markUsedBy(typ.Elem(), typ)
+ }
+
+ switch obj := tv.Type.(type) {
+ case *types.Named:
+ named[obj] = types.NewPointer(obj)
+ c.graph.markUsedBy(obj, obj.Underlying())
+ c.graph.markUsedBy(obj.Underlying(), obj)
+ case *types.Interface:
+ if obj.NumMethods() > 0 {
+ interfaces = append(interfaces, obj)
+ }
+ case *types.Struct:
+ c.useNoCopyFields(obj)
+ if pkg.Pkg.Name() != "main" && !c.WholeProgram {
+ c.useExportedFields(obj)
+ }
+ }
+ }
+
+ // Pretend that all types are meant to implement as many
+ // interfaces as possible.
+ //
+ // TODO(dh): For normal operations, that's the best we can do, as
+ // we have no idea what external users will do with our types. In
+ // whole-program mode, we could be more conservative, in two ways:
+ // 1) Only consider interfaces if a type has been assigned to one
+ // 2) Use SSA and flow analysis and determine the exact set of
+ // interfaces that is relevant.
+ fn := func(iface *types.Interface) {
+ for obj, objPtr := range named {
+ // Check both value and pointer method sets.
+ if !types.Implements(obj, iface) && !types.Implements(objPtr, iface) {
+ continue
+ }
+ ifaceMethods := make(map[string]struct{}, iface.NumMethods())
+ n := iface.NumMethods()
+ for i := 0; i < n; i++ {
+ meth := iface.Method(i)
+ ifaceMethods[meth.Name()] = struct{}{}
+ }
+ for _, obj := range []types.Type{obj, objPtr} {
+ ms := c.msCache.MethodSet(obj)
+ n := ms.Len()
+ for i := 0; i < n; i++ {
+ sel := ms.At(i)
+ meth := sel.Obj().(*types.Func)
+ _, found := ifaceMethods[meth.Name()]
+ if !found {
+ continue
+ }
+ c.graph.markUsedBy(meth.Type().(*types.Signature).Recv().Type(), obj) // embedded receiver
+ if len(sel.Index()) > 1 {
+ f := getField(obj, sel.Index()[0])
+ c.graph.markUsedBy(f, obj) // embedded receiver
+ }
+ c.graph.markUsedBy(meth, obj)
+ }
+ }
+ }
+ }
+
+ for _, iface := range interfaces {
+ fn(iface)
+ }
+ for _, iface := range c.interfaces {
+ fn(iface)
+ }
+}
+
+// processSelections marks objects reached through selector
+// expressions (x.f, x.m) as used. offset is the number of trailing
+// entries of sel.Index() to skip: 1 for method selections (the last
+// index denotes the method itself), 0 for field selections.
+// Intermediate embedded fields on the selection path are marked used
+// as well.
+func (c *Checker) processSelections(pkg *loader.PackageInfo) {
+ fn := func(expr *ast.SelectorExpr, sel *types.Selection, offset int) {
+ scope := pkg.Pkg.Scope().Innermost(expr.Pos())
+ c.graph.markUsedBy(expr.X, c.topmostScope(scope, pkg.Pkg))
+ c.graph.markUsedBy(sel.Obj(), expr.X)
+ // len > 1 means the selection goes through embedded fields.
+ if len(sel.Index()) > 1 {
+ typ := sel.Recv()
+ indices := sel.Index()
+ for _, idx := range indices[:len(indices)-offset] {
+ obj := getField(typ, idx)
+ typ = obj.Type()
+ c.graph.markUsedBy(obj, expr.X)
+ }
+ }
+ }
+
+ for expr, sel := range pkg.Selections {
+ switch sel.Kind() {
+ case types.FieldVal:
+ fn(expr, sel, 0)
+ case types.MethodVal:
+ fn(expr, sel, 1)
+ }
+ }
+}
+
+// dereferenceType returns the element type of a pointer type; any
+// other type is returned unchanged.
+func dereferenceType(typ types.Type) types.Type {
+ if typ, ok := typ.(*types.Pointer); ok {
+ return typ.Elem()
+ }
+ return typ
+}
+
+// processConversion marks fields as used if they're part of a type conversion.
+func (c *Checker) processConversion(pkg *loader.PackageInfo, node ast.Node) {
+ if node, ok := node.(*ast.CallExpr); ok {
+ // For a conversion T(x), node.Fun is the type T itself.
+ // Ordinary function calls have a *types.Signature here and
+ // fall through to the default case.
+ callTyp := pkg.TypeOf(node.Fun)
+ var typDst *types.Struct
+ var ok bool
+ switch typ := callTyp.(type) {
+ case *types.Named:
+ typDst, ok = typ.Underlying().(*types.Struct)
+ case *types.Pointer:
+ typDst, ok = typ.Elem().Underlying().(*types.Struct)
+ default:
+ return
+ }
+ if !ok {
+ return
+ }
+
+ if typ, ok := pkg.TypeOf(node.Args[0]).(*types.Basic); ok && typ.Kind() == types.UnsafePointer {
+ // This is an unsafe conversion. Assume that all the
+ // fields are relevant (they are, because of memory
+ // layout)
+ n := typDst.NumFields()
+ for i := 0; i < n; i++ {
+ c.graph.markUsedBy(typDst.Field(i), typDst)
+ }
+ return
+ }
+
+ typSrc, ok := dereferenceType(pkg.TypeOf(node.Args[0])).Underlying().(*types.Struct)
+ if !ok {
+ return
+ }
+
+ // When we convert from type t1 to t2, were t1 and t2 are
+ // structs, all fields are relevant, as otherwise the
+ // conversion would fail.
+ //
+ // We mark t2's fields as used by t1's fields, and vice
+ // versa. That way, if no code actually refers to a field
+ // in either type, it's still correctly marked as unused.
+ // If a field is used in either struct, it's implicitly
+ // relevant in the other one, too.
+ //
+ // It works in a similar way for conversions between types
+ // of two packages, only that the extra information in the
+ // graph is redundant unless we're in whole program mode.
+ //
+ // The type checker guarantees both structs have matching
+ // field counts for a valid conversion.
+ n := typDst.NumFields()
+ for i := 0; i < n; i++ {
+ fDst := typDst.Field(i)
+ fSrc := typSrc.Field(i)
+ c.graph.markUsedBy(fDst, fSrc)
+ c.graph.markUsedBy(fSrc, fDst)
+ }
+ }
+}
+
+// processCompositeLiteral marks fields as used if the struct is used
+// in a composite literal.
+//
+// Only positional literals (T{a, b} rather than T{X: a}) mark all
+// fields; see isBasicStruct.
+func (c *Checker) processCompositeLiteral(pkg *loader.PackageInfo, node ast.Node) {
+ // XXX how does this actually work? wouldn't it match t{}?
+ if node, ok := node.(*ast.CompositeLit); ok {
+ typ := pkg.TypeOf(node)
+ if _, ok := typ.(*types.Named); ok {
+ typ = typ.Underlying()
+ }
+ if _, ok := typ.(*types.Struct); !ok {
+ return
+ }
+
+ if isBasicStruct(node.Elts) {
+ c.markFields(typ)
+ }
+ }
+}
+
+// processCgoExported marks functions as used if they're being
+// exported to cgo.
+//
+// NOTE(review): the loop returns on the first doc comment line that
+// is not a //go:cgo_export_ directive, so the directive must appear
+// first in the doc block — confirm this is intended.
+func (c *Checker) processCgoExported(pkg *loader.PackageInfo, node ast.Node) {
+ if node, ok := node.(*ast.FuncDecl); ok {
+ if node.Doc == nil {
+ return
+ }
+ for _, cmt := range node.Doc.List {
+ if !strings.HasPrefix(cmt.Text, "//go:cgo_export_") {
+ return
+ }
+ obj := pkg.ObjectOf(node.Name)
+ c.graph.roots = append(c.graph.roots, c.graph.getNode(obj))
+ }
+ }
+}
+
+// processVariableDeclaration marks every object referenced by the
+// initializer expression of a value spec as used by the declared
+// name, so that `var x = f(y)` keeps f and y alive as long as x is.
+// Package names are skipped; names without a corresponding value
+// expression are skipped.
+func (c *Checker) processVariableDeclaration(pkg *loader.PackageInfo, node ast.Node) {
+ if decl, ok := node.(*ast.GenDecl); ok {
+ for _, spec := range decl.Specs {
+ spec, ok := spec.(*ast.ValueSpec)
+ if !ok {
+ continue
+ }
+ for i, name := range spec.Names {
+ if i >= len(spec.Values) {
+ break
+ }
+ value := spec.Values[i]
+ fn := func(node ast.Node) bool {
+ if node3, ok := node.(*ast.Ident); ok {
+ obj := pkg.ObjectOf(node3)
+ if _, ok := obj.(*types.PkgName); ok {
+ return true
+ }
+ c.graph.markUsedBy(obj, pkg.ObjectOf(name))
+ }
+ return true
+ }
+ ast.Inspect(value, fn)
+ }
+ }
+ }
+}
+
+// processArrayConstants marks a named constant as used when it
+// appears as an array length, i.e. [N]T uses N.
+func (c *Checker) processArrayConstants(pkg *loader.PackageInfo, node ast.Node) {
+ if decl, ok := node.(*ast.ArrayType); ok {
+ ident, ok := decl.Len.(*ast.Ident)
+ if !ok {
+ return
+ }
+ c.graph.markUsedBy(pkg.ObjectOf(ident), pkg.TypeOf(decl))
+ }
+}
+
+// processKnownReflectMethodCallers handles known APIs that make a
+// value's methods reachable via reflection — rpc.Register(x) and
+// (*rpc.Server).RegisterName(name, x) — and marks every method in
+// x's method set as used.
+func (c *Checker) processKnownReflectMethodCallers(pkg *loader.PackageInfo, node ast.Node) {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return
+ }
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return
+ }
+ // Accept either a method call on a *net/rpc.Server value or a
+ // call through the net/rpc package itself.
+ if types.TypeString(pkg.TypeOf(sel.X), nil) != "*net/rpc.Server" {
+ x, ok := sel.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ pkgname, ok := pkg.ObjectOf(x).(*types.PkgName)
+ if !ok {
+ return
+ }
+ if pkgname.Imported().Path() != "net/rpc" {
+ return
+ }
+ }
+
+ var arg ast.Expr
+ switch sel.Sel.Name {
+ case "Register":
+ if len(call.Args) != 1 {
+ return
+ }
+ arg = call.Args[0]
+ case "RegisterName":
+ if len(call.Args) != 2 {
+ return
+ }
+ arg = call.Args[1]
+ }
+ if arg == nil {
+ // The selector named neither Register nor RegisterName (e.g.
+ // rpc.Dial). Without this guard, pkg.TypeOf(nil) returns nil
+ // and types.NewMethodSet(nil) panics.
+ return
+ }
+ typ := pkg.TypeOf(arg)
+ ms := types.NewMethodSet(typ)
+ for i := 0; i < ms.Len(); i++ {
+ c.graph.markUsedBy(ms.At(i).Obj(), typ)
+ }
+}
+
+// processAST runs all per-node AST passes (conversions, known
+// reflection callers, composite literals, cgo exports, variable
+// declarations and array-length constants) over every file in pkg.
+func (c *Checker) processAST(pkg *loader.PackageInfo) {
+ fn := func(node ast.Node) bool {
+ c.processConversion(pkg, node)
+ c.processKnownReflectMethodCallers(pkg, node)
+ c.processCompositeLiteral(pkg, node)
+ c.processCgoExported(pkg, node)
+ c.processVariableDeclaration(pkg, node)
+ c.processArrayConstants(pkg, node)
+ return true
+ }
+ for _, file := range pkg.Files {
+ ast.Inspect(file, fn)
+ }
+}
+
+// isBasicStruct reports whether elts contains at least one
+// positional (non key:value) element, i.e. whether the composite
+// literal relies on field order. An empty elts yields false.
+func isBasicStruct(elts []ast.Expr) bool {
+ for _, elt := range elts {
+ if _, ok := elt.(*ast.KeyValueExpr); !ok {
+ return true
+ }
+ }
+ return false
+}
+
+// isPkgScope reports whether obj is declared at package scope.
+func isPkgScope(obj types.Object) bool {
+ return obj.Parent() == obj.Pkg().Scope()
+}
+
+// isMain reports whether obj is the program entry point: a
+// package-level, non-method function named main in package main.
+func isMain(obj types.Object) bool {
+ if obj.Pkg().Name() != "main" {
+ return false
+ }
+ if obj.Name() != "main" {
+ return false
+ }
+ if !isPkgScope(obj) {
+ return false
+ }
+ if !isFunction(obj) {
+ return false
+ }
+ if isMethod(obj) {
+ return false
+ }
+ return true
+}
+
+// isFunction reports whether obj is a function or method.
+func isFunction(obj types.Object) bool {
+ _, ok := obj.(*types.Func)
+ return ok
+}
+
+// isMethod reports whether obj is a function with a receiver.
+func isMethod(obj types.Object) bool {
+ if !isFunction(obj) {
+ return false
+ }
+ return obj.(*types.Func).Type().(*types.Signature).Recv() != nil
+}
+
+// isVariable reports whether obj is a variable (including fields).
+func isVariable(obj types.Object) bool {
+ _, ok := obj.(*types.Var)
+ return ok
+}
+
+// isConstant reports whether obj is a constant.
+func isConstant(obj types.Object) bool {
+ _, ok := obj.(*types.Const)
+ return ok
+}
+
+// isType reports whether obj is a type name.
+func isType(obj types.Object) bool {
+ _, ok := obj.(*types.TypeName)
+ return ok
+}
+
+// isField reports whether obj is a struct field.
+func isField(obj types.Object) bool {
+ if obj, ok := obj.(*types.Var); ok && obj.IsField() {
+ return true
+ }
+ return false
+}
+
+// checkFlags reports whether v is an object kind the user asked the
+// checker to report on. It returns false for values that are not
+// types.Object and for kinds whose corresponding check flag is
+// disabled.
+func (c *Checker) checkFlags(v interface{}) bool {
+ obj, ok := v.(types.Object)
+ if !ok {
+ return false
+ }
+ if isFunction(obj) && !c.checkFunctions() {
+ return false
+ }
+ if isVariable(obj) && !c.checkVariables() {
+ return false
+ }
+ if isConstant(obj) && !c.checkConstants() {
+ return false
+ }
+ if isType(obj) && !c.checkTypes() {
+ return false
+ }
+ if isField(obj) && !c.checkFields() {
+ return false
+ }
+ return true
+}
+
+// isRoot reports whether obj is a root of the usage graph: package
+// names, main, init, Test/Benchmark/Example functions in _test.go
+// files, and (outside whole-program mode) exported package-level
+// objects in non-main packages.
+func (c *Checker) isRoot(obj types.Object) bool {
+ // - in local mode, main, init, tests, and non-test, non-main exported are roots
+ // - in global mode (not yet implemented), main, init and tests are roots
+
+ if _, ok := obj.(*types.PkgName); ok {
+ return true
+ }
+
+ if isMain(obj) || (isFunction(obj) && !isMethod(obj) && obj.Name() == "init") {
+ return true
+ }
+ if obj.Exported() {
+ f := c.lprog.Fset.Position(obj.Pos()).Filename
+ if strings.HasSuffix(f, "_test.go") {
+ return strings.HasPrefix(obj.Name(), "Test") ||
+ strings.HasPrefix(obj.Name(), "Benchmark") ||
+ strings.HasPrefix(obj.Name(), "Example")
+ }
+
+ // Package-level are used, except in package main
+ if isPkgScope(obj) && obj.Pkg().Name() != "main" && !c.WholeProgram {
+ return true
+ }
+ }
+ return false
+}
+
+// markNodesUsed transitively marks the given nodes, and everything
+// they use, as used. The wasUsed check prevents revisiting already
+// processed nodes (and infinite recursion on cycles).
+func markNodesUsed(nodes map[*graphNode]struct{}) {
+ for node := range nodes {
+ wasUsed := node.used
+ node.used = true
+ if !wasUsed {
+ markNodesUsed(node.uses)
+ }
+ }
+}
+
+// markNodesQuiet suppresses reporting for unused nodes that are
+// either filtered out by the check flags or are members of an
+// already-unused container (see markObjQuiet), so only the container
+// itself gets reported.
+func (c *Checker) markNodesQuiet() {
+ for _, node := range c.graph.nodes {
+ if node.used {
+ continue
+ }
+ if obj, ok := node.obj.(types.Object); ok && !c.checkFlags(obj) {
+ node.quiet = true
+ continue
+ }
+ c.markObjQuiet(node.obj)
+ }
+}
+
+// markObjQuiet recursively quiets the members of an unused
+// container: the methods of a named type (and their scopes), the
+// fields of a struct, a function's scope, and every object declared
+// in a scope and its child scopes. Universe-level scopes are left
+// alone.
+func (c *Checker) markObjQuiet(obj interface{}) {
+ switch obj := obj.(type) {
+ case *types.Named:
+ n := obj.NumMethods()
+ for i := 0; i < n; i++ {
+ meth := obj.Method(i)
+ node := c.graph.getNode(meth)
+ node.quiet = true
+ c.markObjQuiet(meth.Scope())
+ }
+ case *types.Struct:
+ n := obj.NumFields()
+ for i := 0; i < n; i++ {
+ field := obj.Field(i)
+ c.graph.nodes[field].quiet = true
+ }
+ case *types.Func:
+ c.markObjQuiet(obj.Scope())
+ case *types.Scope:
+ if obj == nil {
+ return
+ }
+ if obj.Parent() == types.Universe {
+ return
+ }
+ for _, name := range obj.Names() {
+ v := obj.Lookup(name)
+ if n, ok := c.graph.nodes[v]; ok {
+ n.quiet = true
+ }
+ }
+ n := obj.NumChildren()
+ for i := 0; i < n; i++ {
+ c.markObjQuiet(obj.Child(i))
+ }
+ }
+}
+
+// getField returns the idx'th field of a struct type, looking
+// through pointers and named types. It panics on a named type whose
+// underlying type is neither a struct nor a pointer, and returns nil
+// for any other type shape (e.g. interfaces).
+func getField(typ types.Type, idx int) *types.Var {
+ switch obj := typ.(type) {
+ case *types.Pointer:
+ return getField(obj.Elem(), idx)
+ case *types.Named:
+ switch v := obj.Underlying().(type) {
+ case *types.Struct:
+ return v.Field(idx)
+ case *types.Pointer:
+ return getField(v.Elem(), idx)
+ default:
+ panic(fmt.Sprintf("unexpected type %s", typ))
+ }
+ case *types.Struct:
+ return obj.Field(idx)
+ }
+ return nil
+}
+
+// topmostScope walks up from scope to the outermost function-level
+// scope containing it (the scope whose grandparent is the package
+// scope), or returns the package scope itself. Results are memoized
+// in c.topmostCache.
+func (c *Checker) topmostScope(scope *types.Scope, pkg *types.Package) (ret *types.Scope) {
+ if top, ok := c.topmostCache[scope]; ok {
+ return top
+ }
+ defer func() {
+ c.topmostCache[scope] = ret
+ }()
+ if scope == pkg.Scope() {
+ return scope
+ }
+ // Parent is a file scope, so scope is function-level.
+ if scope.Parent().Parent() == pkg.Scope() {
+ return scope
+ }
+ return c.topmostScope(scope.Parent(), pkg)
+}
+
+// printDebugGraph writes the usage graph to w in Graphviz dot
+// format. Node colors: green = used, orange = quiet, purple =
+// filtered by check flags, red = unused. Node n0 represents the
+// roots.
+func (c *Checker) printDebugGraph(w io.Writer) {
+ fmt.Fprintln(w, "digraph {")
+ fmt.Fprintln(w, "n0 [label = roots]")
+ for _, node := range c.graph.nodes {
+ s := fmt.Sprintf("%s (%T)", node.obj, node.obj)
+ // Strip characters that would break the dot label syntax.
+ s = strings.Replace(s, "\n", "", -1)
+ s = strings.Replace(s, `"`, "", -1)
+ fmt.Fprintf(w, `n%d [label = %q]`, node.n, s)
+ color := "black"
+ switch {
+ case node.used:
+ color = "green"
+ case node.quiet:
+ color = "orange"
+ case !c.checkFlags(node.obj):
+ color = "purple"
+ default:
+ color = "red"
+ }
+ fmt.Fprintf(w, "[color = %s]", color)
+ fmt.Fprintln(w)
+ }
+
+ for _, node1 := range c.graph.nodes {
+ for node2 := range node1.uses {
+ fmt.Fprintf(w, "n%d -> n%d\n", node1.n, node2.n)
+ }
+ }
+ for _, root := range c.graph.roots {
+ fmt.Fprintf(w, "n0 -> n%d\n", root.n)
+ }
+ fmt.Fprintln(w, "}")
+}
+
+// isGenerated heuristically reports whether comment marks a
+// machine-generated file, by looking for the conventional
+// "Code generated by" / "DO NOT EDIT" phrases.
+func isGenerated(comment string) bool {
+ return strings.Contains(comment, "Code generated by") ||
+ strings.Contains(comment, "DO NOT EDIT")
+}
diff --git a/vendor/honnef.co/go/tools/version/version.go b/vendor/honnef.co/go/tools/version/version.go
new file mode 100644
index 0000000..9536482
--- /dev/null
+++ b/vendor/honnef.co/go/tools/version/version.go
@@ -0,0 +1,17 @@
+package version
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+)
+
+// Version is the tool version reported by Print.
+const Version = "2017.2.2"
+
+// Print writes the program name (basename of os.Args[0]) and Version
+// to stdout; when Version is the sentinel "devel", "(no version)" is
+// printed instead of a number.
+func Print() {
+ if Version == "devel" {
+ fmt.Printf("%s (no version)\n", filepath.Base(os.Args[0]))
+ } else {
+ fmt.Printf("%s %s\n", filepath.Base(os.Args[0]), Version)
+ }
+}