Update and convert glide.lock; chase gps tip
diff --git a/glide.lock b/glide.lock
index 0482f9d..04611c5 100644
--- a/glide.lock
+++ b/glide.lock
@@ -1,18 +1,24 @@
-hash: 7b0d46d2b21d5d8ff24023a402285f87b14b9f554ae52913cc7ea08bfd17453d
-updated: 2016-07-13T23:09:11.03428654-04:00
+hash: e12d18f87508f2f53e2981b52a02ed23d135f59ab90f3afca813727c0685eec0
+updated: 2016-09-15T09:56:26.054743146-04:00
imports:
- name: github.com/armon/go-radix
- version: 4239b77079c7b5d1243b7b4736304ce8ddb6f0f2
+ branch: master
+ revision: 4239b77079c7b5d1243b7b4736304ce8ddb6f0f2
- name: github.com/codegangsta/cli
- version: 71f57d300dd6a780ac1856c005c4b518cfd498ec
+ version: v1.14.0
+ revision: 71f57d300dd6a780ac1856c005c4b518cfd498ec
- name: github.com/Masterminds/semver
- version: b3ef6b1808e9889dfb8767ce7068db923a3d07de
+ branch: 2.x
+ revision: b3ef6b1808e9889dfb8767ce7068db923a3d07de
- name: github.com/Masterminds/vcs
- version: fbe9fb6ad5b5f35b3e82a7c21123cfc526cbf895
+ version: v1.8.0
+ revision: fbe9fb6ad5b5f35b3e82a7c21123cfc526cbf895
- name: github.com/sdboyer/gps
- version: 6e8a101af5e735feedcdae1f716bebc338b74525
+ branch: master
+ revision: 166f36103aedbb9d835b9b6dcc7762a6bd900a98
- name: github.com/termie/go-shutil
- version: bcacb06fecaeec8dc42af03c87c6949f4a05c74c
+ revision: bcacb06fecaeec8dc42af03c87c6949f4a05c74c
- name: gopkg.in/yaml.v2
- version: a83829b6f1293c91addabc89d0571c246397bbf4
+ branch: v2
+ revision: 31c299268d302dd0aa9a0dcf765a3d58971ac83f
testImports: []
diff --git a/vendor/github.com/sdboyer/gps/README.md b/vendor/github.com/sdboyer/gps/README.md
index 2cd2d99..381c2a8 100644
--- a/vendor/github.com/sdboyer/gps/README.md
+++ b/vendor/github.com/sdboyer/gps/README.md
@@ -3,7 +3,7 @@
src="header.png"
width="800" height="255" border="0" alt="gps">
<br>
-<a href="https://circleci.com/gh/sdboyer/gps"><img src="https://circleci.com/gh/sdboyer/gps.svg?style=svg" alt="Build Status"></a>
+<a href="https://circleci.com/gh/sdboyer/gps"><img src="https://circleci.com/gh/sdboyer/gps.svg?style=shield" alt="Build Status"></a>
<a href="https://ci.appveyor.com/project/sdboyer/gps"><img src="https://ci.appveyor.com/api/projects/status/github/sdboyer/gps?svg=true&branch=master&passingText=Windows%20-%20OK&failingText=Windows%20-%20failed&pendingText=Windows%20-%20pending" alt="Windows Build Status"></a>
<a href="https://goreportcard.com/report/github.com/sdboyer/gps"><img src="https://goreportcard.com/badge/github.com/sdboyer/gps" alt="Build Status"></a>
<a href="https://codecov.io/gh/sdboyer/gps"><img src="https://codecov.io/gh/sdboyer/gps/branch/master/graph/badge.svg" alt="Codecov" /></a>
diff --git a/vendor/github.com/sdboyer/gps/analysis.go b/vendor/github.com/sdboyer/gps/analysis.go
index 7fcb5bf..d410eb3 100644
--- a/vendor/github.com/sdboyer/gps/analysis.go
+++ b/vendor/github.com/sdboyer/gps/analysis.go
@@ -69,7 +69,7 @@
// A PackageTree is returned, which contains the ImportRoot and map of import path
// to PackageOrErr - each path under the root that exists will have either a
// Package, or an error describing why the directory is not a valid package.
-func listPackages(fileRoot, importRoot string) (PackageTree, error) {
+func ListPackages(fileRoot, importRoot string) (PackageTree, error) {
// Set up a build.ctx for parsing
ctx := build.Default
ctx.GOROOT = ""
@@ -148,9 +148,6 @@
// We do skip dot-dirs, though, because it's such a ubiquitous standard
// that they not be visited by normal commands, and because things get
// really weird if we don't.
- //
- // TODO(sdboyer) does this entail that we should chuck dot-led import
- // paths later on?
if strings.HasPrefix(fi.Name(), ".") {
return filepath.SkipDir
}
@@ -297,19 +294,279 @@
return fmt.Sprintf("import path %s had problematic local imports", e.Dir)
}
+func readFileBuildTags(fp string) ([]string, error) {
+ co, err := readGoContents(fp)
+ if err != nil {
+ return []string{}, err
+ }
+
+ var tags []string
+ // Only look at places where we had a code comment.
+ if len(co) > 0 {
+ t := findTags(co)
+ for _, tg := range t {
+ found := false
+ for _, tt := range tags {
+ if tt == tg {
+ found = true
+ }
+ }
+ if !found {
+ tags = append(tags, tg)
+ }
+ }
+ }
+
+ return tags, nil
+}
+
+// Read contents of a Go file up to the package declaration. This can be used
+// to find the build tags.
+func readGoContents(fp string) ([]byte, error) {
+ f, err := os.Open(fp)
+ defer f.Close()
+ if err != nil {
+ return []byte{}, err
+ }
+
+ var s scanner.Scanner
+ s.Init(f)
+ var tok rune
+ var pos scanner.Position
+ for tok != scanner.EOF {
+ tok = s.Scan()
+
+ // Getting the token text will skip comments by default.
+ tt := s.TokenText()
+ // build tags will not be after the package declaration.
+ if tt == "package" {
+ pos = s.Position
+ break
+ }
+ }
+
+ var buf bytes.Buffer
+ f.Seek(0, 0)
+ _, err = io.CopyN(&buf, f, int64(pos.Offset))
+ if err != nil {
+ return []byte{}, err
+ }
+
+ return buf.Bytes(), nil
+}
+
+// From a byte slice of a Go file find the tags.
+func findTags(co []byte) []string {
+ p := co
+ var tgs []string
+ for len(p) > 0 {
+ line := p
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, p = line[:i], p[i+1:]
+ } else {
+ p = p[len(p):]
+ }
+ line = bytes.TrimSpace(line)
+ // Only look at comment lines that are well formed in the Go style
+ if bytes.HasPrefix(line, []byte("//")) {
+ line = bytes.TrimSpace(line[len([]byte("//")):])
+ if len(line) > 0 && line[0] == '+' {
+ f := strings.Fields(string(line))
+
+ // We've found a +build tag line.
+ if f[0] == "+build" {
+ for _, tg := range f[1:] {
+ tgs = append(tgs, tg)
+ }
+ }
+ }
+ }
+ }
+
+ return tgs
+}
+
+// A PackageTree represents the results of recursively parsing a tree of
+// packages, starting at the ImportRoot. The results of parsing the files in the
+// directory identified by each import path - a Package or an error - are stored
+// in the Packages map, keyed by that import path.
+type PackageTree struct {
+ ImportRoot string
+ Packages map[string]PackageOrErr
+}
+
+// dup copies the PackageTree.
+//
+// This is really only useful as a defensive measure to prevent external state
+// mutations.
+func (t PackageTree) dup() PackageTree {
+ t2 := PackageTree{
+ ImportRoot: t.ImportRoot,
+ Packages: map[string]PackageOrErr{},
+ }
+
+ for path, poe := range t.Packages {
+ poe2 := PackageOrErr{
+ Err: poe.Err,
+ P: poe.P,
+ }
+ if len(poe.P.Imports) > 0 {
+ poe2.P.Imports = make([]string, len(poe.P.Imports))
+ copy(poe2.P.Imports, poe.P.Imports)
+ }
+ if len(poe.P.TestImports) > 0 {
+ poe2.P.TestImports = make([]string, len(poe.P.TestImports))
+ copy(poe2.P.TestImports, poe.P.TestImports)
+ }
+
+ t2.Packages[path] = poe2
+ }
+
+ return t2
+}
+
type wm struct {
err error
ex map[string]bool
in map[string]bool
}
-// wmToReach takes an externalReach()-style workmap and transitively walks all
-// internal imports until they reach an external path or terminate, then
+// PackageOrErr stores the results of attempting to parse a single directory for
+// Go source code.
+type PackageOrErr struct {
+ P Package
+ Err error
+}
+
+// ReachMap maps a set of import paths (keys) to the set of external packages
+// transitively reachable from the packages at those import paths.
+//
+// See PackageTree.ExternalReach() for more information.
+type ReachMap map[string][]string
+
+// ExternalReach looks through a PackageTree and computes the list of external
+// import statements (that is, import statements pointing to packages that are
+// not logical children of PackageTree.ImportRoot) that are transitively
+// imported by the internal packages in the tree.
+//
+// main indicates whether (true) or not (false) to include main packages in the
+// analysis. When utilized by gps' solver, main packages are generally excluded
+// from analyzing anything other than the root project, as they necessarily can't
+// be imported.
+//
+// tests indicates whether (true) or not (false) to include imports from test
+// files in packages when computing the reach map.
+//
+// ignore is a map of import paths that, if encountered, should be excluded from
+// analysis. This exclusion applies to both internal and external packages. If
+// an external import path is ignored, it is simply omitted from the results.
+//
+// If an internal path is ignored, then not only does it not appear in the final
+// map, but it is also excluded from the transitive calculations of other
+// internal packages. That is, if you ignore A/foo, then the external package
+// list for all internal packages that import A/foo will not include external
+// packages that are only reachable through A/foo.
+//
+// Visually, this means that, given a PackageTree with root A and packages at A,
+// A/foo, and A/bar, and the following import chain:
+//
+// A -> A/foo -> A/bar -> B/baz
+//
+// In this configuration, all of A's packages transitively import B/baz, so the
+// returned map would be:
+//
+// map[string][]string{
+// "A": []string{"B/baz"},
+// 	"A/foo": []string{"B/baz"},
+// "A/bar": []string{"B/baz"},
+// }
+//
+// However, if you ignore A/foo, then A's path to B/baz is broken, and A/foo is
+// omitted entirely. Thus, the returned map would be:
+//
+// map[string][]string{
+// "A": []string{},
+// "A/bar": []string{"B/baz"},
+// }
+//
+// If there are no packages to ignore, it is safe to pass a nil map.
+func (t PackageTree) ExternalReach(main, tests bool, ignore map[string]bool) ReachMap {
+ if ignore == nil {
+ ignore = make(map[string]bool)
+ }
+
+ // world's simplest adjacency list
+ workmap := make(map[string]wm)
+
+ var imps []string
+ for ip, perr := range t.Packages {
+ if perr.Err != nil {
+ workmap[ip] = wm{
+ err: perr.Err,
+ }
+ continue
+ }
+ p := perr.P
+
+ // Skip main packages, unless param says otherwise
+ if p.Name == "main" && !main {
+ continue
+ }
+ // Skip ignored packages
+ if ignore[ip] {
+ continue
+ }
+
+ imps = imps[:0]
+ imps = p.Imports
+ if tests {
+ imps = dedupeStrings(imps, p.TestImports)
+ }
+
+ w := wm{
+ ex: make(map[string]bool),
+ in: make(map[string]bool),
+ }
+
+ for _, imp := range imps {
+ // Skip ignored imports
+ if ignore[imp] {
+ continue
+ }
+
+ if !checkPrefixSlash(filepath.Clean(imp), t.ImportRoot) {
+ w.ex[imp] = true
+ } else {
+ if w2, seen := workmap[imp]; seen {
+ for i := range w2.ex {
+ w.ex[i] = true
+ }
+ for i := range w2.in {
+ w.in[i] = true
+ }
+ } else {
+ w.in[imp] = true
+ }
+ }
+ }
+
+ workmap[ip] = w
+ }
+
+ //return wmToReach(workmap, t.ImportRoot)
+ return wmToReach(workmap, "") // TODO(sdboyer) this passes tests, but doesn't seem right
+}
+
+// wmToReach takes an internal "workmap" constructed by
+// PackageTree.ExternalReach(), transitively walks (via depth-first traversal)
+// all internal imports until they reach an external path or terminate, then
// translates the results into a slice of external imports for each internal
// pkg.
//
// The basedir string, with a trailing slash ensured, will be stripped from the
// keys of the returned map.
+//
+// This is mostly separated out for testing purposes.
func wmToReach(workmap map[string]wm, basedir string) map[string][]string {
// Uses depth-first exploration to compute reachability into external
// packages, dropping any internal packages on "poisoned paths" - a path
@@ -502,351 +759,9 @@
return rm
}
-func readBuildTags(p string) ([]string, error) {
- _, err := os.Stat(p)
- if err != nil {
- return []string{}, err
- }
-
- d, err := os.Open(p)
- if err != nil {
- return []string{}, err
- }
-
- objects, err := d.Readdir(-1)
- if err != nil {
- return []string{}, err
- }
-
- var tags []string
- for _, obj := range objects {
-
- // only process Go files
- if strings.HasSuffix(obj.Name(), ".go") {
- fp := filepath.Join(p, obj.Name())
-
- co, err := readGoContents(fp)
- if err != nil {
- return []string{}, err
- }
-
- // Only look at places where we had a code comment.
- if len(co) > 0 {
- t := findTags(co)
- for _, tg := range t {
- found := false
- for _, tt := range tags {
- if tt == tg {
- found = true
- }
- }
- if !found {
- tags = append(tags, tg)
- }
- }
- }
- }
- }
-
- return tags, nil
-}
-
-func readFileBuildTags(fp string) ([]string, error) {
- co, err := readGoContents(fp)
- if err != nil {
- return []string{}, err
- }
-
- var tags []string
- // Only look at places where we had a code comment.
- if len(co) > 0 {
- t := findTags(co)
- for _, tg := range t {
- found := false
- for _, tt := range tags {
- if tt == tg {
- found = true
- }
- }
- if !found {
- tags = append(tags, tg)
- }
- }
- }
-
- return tags, nil
-}
-
-// Read contents of a Go file up to the package declaration. This can be used
-// to find the the build tags.
-func readGoContents(fp string) ([]byte, error) {
- f, err := os.Open(fp)
- defer f.Close()
- if err != nil {
- return []byte{}, err
- }
-
- var s scanner.Scanner
- s.Init(f)
- var tok rune
- var pos scanner.Position
- for tok != scanner.EOF {
- tok = s.Scan()
-
- // Getting the token text will skip comments by default.
- tt := s.TokenText()
- // build tags will not be after the package declaration.
- if tt == "package" {
- pos = s.Position
- break
- }
- }
-
- var buf bytes.Buffer
- f.Seek(0, 0)
- _, err = io.CopyN(&buf, f, int64(pos.Offset))
- if err != nil {
- return []byte{}, err
- }
-
- return buf.Bytes(), nil
-}
-
-// From a byte slice of a Go file find the tags.
-func findTags(co []byte) []string {
- p := co
- var tgs []string
- for len(p) > 0 {
- line := p
- if i := bytes.IndexByte(line, '\n'); i >= 0 {
- line, p = line[:i], p[i+1:]
- } else {
- p = p[len(p):]
- }
- line = bytes.TrimSpace(line)
- // Only look at comment lines that are well formed in the Go style
- if bytes.HasPrefix(line, []byte("//")) {
- line = bytes.TrimSpace(line[len([]byte("//")):])
- if len(line) > 0 && line[0] == '+' {
- f := strings.Fields(string(line))
-
- // We've found a +build tag line.
- if f[0] == "+build" {
- for _, tg := range f[1:] {
- tgs = append(tgs, tg)
- }
- }
- }
- }
- }
-
- return tgs
-}
-
-// Get an OS value that's not the one passed in.
-func getOsValue(n string) string {
- for _, o := range osList {
- if o != n {
- return o
- }
- }
-
- return n
-}
-
-func isSupportedOs(n string) bool {
- for _, o := range osList {
- if o == n {
- return true
- }
- }
-
- return false
-}
-
-// Get an Arch value that's not the one passed in.
-func getArchValue(n string) string {
- for _, o := range archList {
- if o != n {
- return o
- }
- }
-
- return n
-}
-
-func isSupportedArch(n string) bool {
- for _, o := range archList {
- if o == n {
- return true
- }
- }
-
- return false
-}
-
-func ensureTrailingSlash(s string) string {
- return strings.TrimSuffix(s, string(os.PathSeparator)) + string(os.PathSeparator)
-}
-
-// helper func to merge, dedupe, and sort strings
-func dedupeStrings(s1, s2 []string) (r []string) {
- dedupe := make(map[string]bool)
-
- if len(s1) > 0 && len(s2) > 0 {
- for _, i := range s1 {
- dedupe[i] = true
- }
- for _, i := range s2 {
- dedupe[i] = true
- }
-
- for i := range dedupe {
- r = append(r, i)
- }
- // And then re-sort them
- sort.Strings(r)
- } else if len(s1) > 0 {
- r = s1
- } else if len(s2) > 0 {
- r = s2
- }
-
- return
-}
-
-// A PackageTree represents the results of recursively parsing a tree of
-// packages, starting at the ImportRoot. The results of parsing the files in the
-// directory identified by each import path - a Package or an error - are stored
-// in the Packages map, keyed by that import path.
-type PackageTree struct {
- ImportRoot string
- Packages map[string]PackageOrErr
-}
-
-// PackageOrErr stores the results of attempting to parse a single directory for
-// Go source code.
-type PackageOrErr struct {
- P Package
- Err error
-}
-
-// ExternalReach looks through a PackageTree and computes the list of external
-// import statements (that is, import statements pointing to packages that are
-// not logical children of PackageTree.ImportRoot) that are transitively
-// imported by the internal packages in the tree.
-//
-// main indicates whether (true) or not (false) to include main packages in the
-// analysis. When utilized by gps' solver, main packages are generally excluded
-// from analyzing anything other than the root project, as they necessarily can't
-// be imported.
-//
-// tests indicates whether (true) or not (false) to include imports from test
-// files in packages when computing the reach map.
-//
-// ignore is a map of import paths that, if encountered, should be excluded from
-// analysis. This exclusion applies to both internal and external packages. If
-// an external import path is ignored, it is simply omitted from the results.
-//
-// If an internal path is ignored, then not only does it not appear in the final
-// map, but it is also excluded from the transitive calculations of other
-// internal packages. That is, if you ignore A/foo, then the external package
-// list for all internal packages that import A/foo will not include external
-// packages that are only reachable through A/foo.
-//
-// Visually, this means that, given a PackageTree with root A and packages at A,
-// A/foo, and A/bar, and the following import chain:
-//
-// A -> A/foo -> A/bar -> B/baz
-//
-// In this configuration, all of A's packages transitively import B/baz, so the
-// returned map would be:
-//
-// map[string][]string{
-// "A": []string{"B/baz"},
-// "A/foo": []string{"B/baz"}
-// "A/bar": []string{"B/baz"},
-// }
-//
-// However, if you ignore A/foo, then A's path to B/baz is broken, and A/foo is
-// omitted entirely. Thus, the returned map would be:
-//
-// map[string][]string{
-// "A": []string{},
-// "A/bar": []string{"B/baz"},
-// }
-//
-// If there are no packages to ignore, it is safe to pass a nil map.
-func (t PackageTree) ExternalReach(main, tests bool, ignore map[string]bool) map[string][]string {
- if ignore == nil {
- ignore = make(map[string]bool)
- }
-
- // world's simplest adjacency list
- workmap := make(map[string]wm)
-
- var imps []string
- for ip, perr := range t.Packages {
- if perr.Err != nil {
- workmap[ip] = wm{
- err: perr.Err,
- }
- continue
- }
- p := perr.P
-
- // Skip main packages, unless param says otherwise
- if p.Name == "main" && !main {
- continue
- }
- // Skip ignored packages
- if ignore[ip] {
- continue
- }
-
- imps = imps[:0]
- imps = p.Imports
- if tests {
- imps = dedupeStrings(imps, p.TestImports)
- }
-
- w := wm{
- ex: make(map[string]bool),
- in: make(map[string]bool),
- }
-
- for _, imp := range imps {
- // Skip ignored imports
- if ignore[imp] {
- continue
- }
-
- if !checkPrefixSlash(filepath.Clean(imp), t.ImportRoot) {
- w.ex[imp] = true
- } else {
- if w2, seen := workmap[imp]; seen {
- for i := range w2.ex {
- w.ex[i] = true
- }
- for i := range w2.in {
- w.in[i] = true
- }
- } else {
- w.in[imp] = true
- }
- }
- }
-
- workmap[ip] = w
- }
-
- //return wmToReach(workmap, t.ImportRoot)
- return wmToReach(workmap, "") // TODO(sdboyer) this passes tests, but doesn't seem right
-}
-
// ListExternalImports computes a sorted, deduplicated list of all the external
-// packages that are reachable through imports from all valid packages in the
-// PackageTree.
+// packages that are reachable through imports from all valid packages in a
+// ReachMap, as computed by PackageTree.ExternalReach().
//
// main and tests determine whether main packages and test imports should be
// included in the calculation. "External" is defined as anything not prefixed,
@@ -910,10 +825,7 @@
// -> A/.bar -> B/baz
//
// A is legal, and it imports A/.bar, so the results will include B/baz.
-func (t PackageTree) ListExternalImports(main, tests bool, ignore map[string]bool) []string {
- // First, we need a reachmap
- rm := t.ExternalReach(main, tests, ignore)
-
+func (rm ReachMap) ListExternalImports() []string {
exm := make(map[string]struct{})
for pkg, reach := range rm {
// Eliminate import paths with any elements having leading dots, leading
@@ -962,3 +874,33 @@
}
return s == prefix || strings.HasPrefix(s, ensureTrailingSlash(prefix))
}
+
+func ensureTrailingSlash(s string) string {
+ return strings.TrimSuffix(s, string(os.PathSeparator)) + string(os.PathSeparator)
+}
+
+// helper func to merge, dedupe, and sort strings
+func dedupeStrings(s1, s2 []string) (r []string) {
+ dedupe := make(map[string]bool)
+
+ if len(s1) > 0 && len(s2) > 0 {
+ for _, i := range s1 {
+ dedupe[i] = true
+ }
+ for _, i := range s2 {
+ dedupe[i] = true
+ }
+
+ for i := range dedupe {
+ r = append(r, i)
+ }
+ // And then re-sort them
+ sort.Strings(r)
+ } else if len(s1) > 0 {
+ r = s1
+ } else if len(s2) > 0 {
+ r = s2
+ }
+
+ return
+}
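
With this change, listPackages is exported as ListPackages, PackageTree and PackageOrErr are defined alongside it, and external-import listing now hangs off the ReachMap returned by ExternalReach rather than off PackageTree itself. A minimal sketch of the new call chain from outside the package, using only what the hunks above export; the root directory and import root are placeholders, not values from this commit:

	package main

	import (
		"fmt"
		"log"

		"github.com/sdboyer/gps"
	)

	func main() {
		// ListPackages is the newly exported form of listPackages: it parses
		// every directory under the file root into a PackageTree.
		ptree, err := gps.ListPackages("/path/to/project", "example.com/project")
		if err != nil {
			log.Fatal(err)
		}

		// Include main packages and test imports, ignore nothing. The result
		// is now a ReachMap rather than a bare map[string][]string.
		rm := ptree.ExternalReach(true, true, nil)

		// ListExternalImports moved from PackageTree to ReachMap.
		for _, imp := range rm.ListExternalImports() {
			fmt.Println(imp)
		}
	}
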
diff --git a/vendor/github.com/sdboyer/gps/analysis_test.go b/vendor/github.com/sdboyer/gps/analysis_test.go
index 210d036..c21f53b 100644
--- a/vendor/github.com/sdboyer/gps/analysis_test.go
+++ b/vendor/github.com/sdboyer/gps/analysis_test.go
@@ -836,7 +836,7 @@
continue
}
- out, err := listPackages(fix.fileRoot, fix.importRoot)
+ out, err := ListPackages(fix.fileRoot, fix.importRoot)
if err != nil && fix.err == nil {
t.Errorf("listPackages(%q): Received error but none expected: %s", name, err)
@@ -889,7 +889,7 @@
func TestListExternalImports(t *testing.T) {
// There's enough in the 'varied' test case to test most of what matters
- vptree, err := listPackages(filepath.Join(getwd(t), "_testdata", "src", "varied"), "varied")
+ vptree, err := ListPackages(filepath.Join(getwd(t), "_testdata", "src", "varied"), "varied")
if err != nil {
t.Fatalf("listPackages failed on varied test case: %s", err)
}
@@ -900,7 +900,7 @@
var main, tests bool
validate := func() {
- result := vptree.ListExternalImports(main, tests, ignore)
+ result := vptree.ExternalReach(main, tests, ignore).ListExternalImports()
if !reflect.DeepEqual(expect, result) {
t.Errorf("Wrong imports in %q case:\n\t(GOT): %s\n\t(WNT): %s", name, result, expect)
}
@@ -1034,12 +1034,12 @@
validate()
// The only thing varied *doesn't* cover is disallowed path patterns
- ptree, err := listPackages(filepath.Join(getwd(t), "_testdata", "src", "disallow"), "disallow")
+ ptree, err := ListPackages(filepath.Join(getwd(t), "_testdata", "src", "disallow"), "disallow")
if err != nil {
t.Fatalf("listPackages failed on disallow test case: %s", err)
}
- result := ptree.ListExternalImports(false, false, nil)
+ result := ptree.ExternalReach(false, false, nil).ListExternalImports()
expect = []string{"github.com/sdboyer/gps", "hash", "sort"}
if !reflect.DeepEqual(expect, result) {
t.Errorf("Wrong imports in %q case:\n\t(GOT): %s\n\t(WNT): %s", name, result, expect)
@@ -1048,7 +1048,7 @@
func TestExternalReach(t *testing.T) {
// There's enough in the 'varied' test case to test most of what matters
- vptree, err := listPackages(filepath.Join(getwd(t), "_testdata", "src", "varied"), "varied")
+ vptree, err := ListPackages(filepath.Join(getwd(t), "_testdata", "src", "varied"), "varied")
if err != nil {
t.Fatalf("listPackages failed on varied test case: %s", err)
}
diff --git a/vendor/github.com/sdboyer/gps/bridge.go b/vendor/github.com/sdboyer/gps/bridge.go
index a7368e3..379cd4b 100644
--- a/vendor/github.com/sdboyer/gps/bridge.go
+++ b/vendor/github.com/sdboyer/gps/bridge.go
@@ -14,7 +14,6 @@
type sourceBridge interface {
SourceManager // composes SourceManager
verifyRootDir(path string) error
- computeRootReach() ([]string, error)
pairRevision(id ProjectIdentifier, r Revision) []Version
pairVersion(id ProjectIdentifier, v UnpairedVersion) PairedVersion
vendorCodeExists(id ProjectIdentifier) (bool, error)
@@ -68,7 +67,7 @@
}
func (b *bridge) GetManifestAndLock(id ProjectIdentifier, v Version) (Manifest, Lock, error) {
- if id.ProjectRoot == b.s.params.ImportRoot {
+ if id.ProjectRoot == ProjectRoot(b.s.rpt.ImportRoot) {
return b.s.rm, b.s.rl, nil
}
return b.sm.GetManifestAndLock(id, v)
@@ -339,57 +338,14 @@
return nil
}
-// computeRootReach is a specialized, less stringent version of listExternal
-// that allows for a bit of fuzziness in the source inputs.
-//
-// Specifically, we need to:
-// - Analyze test-type files as well as typical source files
-// - Make a best-effort attempt even if the code doesn't compile
-// - Include main packages in the analysis
-//
-// Perhaps most important is that we don't want to have the results of this
-// analysis be in any permanent cache, and we want to read directly from our
-// potentially messy root project source location on disk. Together, this means
-// that we can't ask the real SourceManager to do it.
-func (b *bridge) computeRootReach() ([]string, error) {
- // TODO(sdboyer) i now cannot remember the reasons why i thought being less stringent
- // in the analysis was OK. so, for now, we just compute a bog-standard list
- // of externally-touched packages, including mains and test.
- ptree, err := b.listRootPackages()
- if err != nil {
- return nil, err
- }
-
- return ptree.ListExternalImports(true, true, b.s.ig), nil
-}
-
-func (b *bridge) listRootPackages() (PackageTree, error) {
- if b.crp == nil {
- ptree, err := listPackages(b.s.params.RootDir, string(b.s.params.ImportRoot))
-
- b.crp = &struct {
- ptree PackageTree
- err error
- }{
- ptree: ptree,
- err: err,
- }
- }
- if b.crp.err != nil {
- return PackageTree{}, b.crp.err
- }
-
- return b.crp.ptree, nil
-}
-
// listPackages lists all the packages contained within the given project at a
// particular version.
//
// The root project is handled separately, as the source manager isn't
// responsible for that code.
func (b *bridge) ListPackages(id ProjectIdentifier, v Version) (PackageTree, error) {
- if id.ProjectRoot == b.s.params.ImportRoot {
- return b.listRootPackages()
+ if id.ProjectRoot == ProjectRoot(b.s.rpt.ImportRoot) {
+ panic("should never call ListPackages on root project")
}
return b.sm.ListPackages(id, v)
diff --git a/vendor/github.com/sdboyer/gps/example.go b/vendor/github.com/sdboyer/gps/example.go
index 2bbbe2c..c3a827a 100644
--- a/vendor/github.com/sdboyer/gps/example.go
+++ b/vendor/github.com/sdboyer/gps/example.go
@@ -32,10 +32,10 @@
// Set up params, including tracing
params := gps.SolveParameters{
RootDir: root,
- ImportRoot: gps.ProjectRoot(importroot),
Trace: true,
TraceLogger: log.New(os.Stdout, "", 0),
}
+ params.RootPackageTree, _ = gps.ListPackages(root, importroot)
// Set up a SourceManager with the NaiveAnalyzer
sourcemgr, _ := gps.NewSourceManager(NaiveAnalyzer{}, ".repocache", false)
diff --git a/vendor/github.com/sdboyer/gps/hash.go b/vendor/github.com/sdboyer/gps/hash.go
index 893c34e..ca9c9a2 100644
--- a/vendor/github.com/sdboyer/gps/hash.go
+++ b/vendor/github.com/sdboyer/gps/hash.go
@@ -20,7 +20,7 @@
// Do these checks up front before any other work is needed, as they're the
// only things that can cause errors
// Pass in magic root values, and the bridge will analyze the right thing
- ptree, err := s.b.ListPackages(ProjectIdentifier{ProjectRoot: s.params.ImportRoot}, nil)
+ ptree, err := s.b.ListPackages(ProjectIdentifier{ProjectRoot: ProjectRoot(s.params.RootPackageTree.ImportRoot)}, nil)
if err != nil {
return nil, badOptsFailure(fmt.Sprintf("Error while parsing packages under %s: %s", s.params.RootDir, err.Error()))
}
diff --git a/vendor/github.com/sdboyer/gps/hash_test.go b/vendor/github.com/sdboyer/gps/hash_test.go
index f356ced..51732ca 100644
--- a/vendor/github.com/sdboyer/gps/hash_test.go
+++ b/vendor/github.com/sdboyer/gps/hash_test.go
@@ -10,9 +10,9 @@
fix := basicFixtures["shared dependency with overlapping constraints"]
params := SolveParameters{
- RootDir: string(fix.ds[0].n),
- ImportRoot: fix.ds[0].n,
- Manifest: fix.rootmanifest(),
+ RootDir: string(fix.ds[0].n),
+ RootPackageTree: fix.rootTree(),
+ Manifest: fix.rootmanifest(),
}
s, err := Prepare(params, newdepspecSM(fix.ds, nil))
@@ -51,15 +51,16 @@
func TestHashInputsIgnores(t *testing.T) {
fix := basicFixtures["shared dependency with overlapping constraints"]
- rm := fix.rootmanifest().(simpleRootManifest)
+ rm := fix.rootmanifest().(simpleRootManifest).dup()
rm.ig = map[string]bool{
"foo": true,
"bar": true,
}
+
params := SolveParameters{
- RootDir: string(fix.ds[0].n),
- ImportRoot: fix.ds[0].n,
- Manifest: rm,
+ RootDir: string(fix.ds[0].n),
+ RootPackageTree: fix.rootTree(),
+ Manifest: rm,
}
s, err := Prepare(params, newdepspecSM(fix.ds, nil))
@@ -101,7 +102,7 @@
func TestHashInputsOverrides(t *testing.T) {
fix := basicFixtures["shared dependency with overlapping constraints"]
- rm := fix.rootmanifest().(simpleRootManifest)
+ rm := fix.rootmanifest().(simpleRootManifest).dup()
// First case - override something not in the root, just with network name
rm.ovr = map[ProjectRoot]ProjectProperties{
"c": ProjectProperties{
@@ -109,9 +110,9 @@
},
}
params := SolveParameters{
- RootDir: string(fix.ds[0].n),
- ImportRoot: fix.ds[0].n,
- Manifest: rm,
+ RootDir: string(fix.ds[0].n),
+ RootPackageTree: fix.rootTree(),
+ Manifest: rm,
}
s, err := Prepare(params, newdepspecSM(fix.ds, nil))
diff --git a/vendor/github.com/sdboyer/gps/manifest.go b/vendor/github.com/sdboyer/gps/manifest.go
index 94513d0..ff23ec0 100644
--- a/vendor/github.com/sdboyer/gps/manifest.go
+++ b/vendor/github.com/sdboyer/gps/manifest.go
@@ -90,6 +90,26 @@
func (m simpleRootManifest) IgnorePackages() map[string]bool {
return m.ig
}
+func (m simpleRootManifest) dup() simpleRootManifest {
+ m2 := simpleRootManifest{
+ c: make([]ProjectConstraint, len(m.c)),
+ tc: make([]ProjectConstraint, len(m.tc)),
+ ovr: ProjectConstraints{},
+ ig: map[string]bool{},
+ }
+
+ copy(m2.c, m.c)
+ copy(m2.tc, m.tc)
+
+ for k, v := range m.ovr {
+ m2.ovr[k] = v
+ }
+ for k, v := range m.ig {
+ m2.ig[k] = v
+ }
+
+ return m2
+}
// prepManifest ensures a manifest is prepared and safe for use by the solver.
// This is mostly about ensuring that no outside routine can modify the manifest
diff --git a/vendor/github.com/sdboyer/gps/solve_basic_test.go b/vendor/github.com/sdboyer/gps/solve_basic_test.go
index c0ca587..9fe9780 100644
--- a/vendor/github.com/sdboyer/gps/solve_basic_test.go
+++ b/vendor/github.com/sdboyer/gps/solve_basic_test.go
@@ -348,6 +348,7 @@
type specfix interface {
name() string
rootmanifest() RootManifest
+ rootTree() PackageTree
specs() []depspec
maxTries() int
solution() map[ProjectIdentifier]Version
@@ -413,6 +414,33 @@
}
}
+func (f basicFixture) rootTree() PackageTree {
+ var imp, timp []string
+ for _, dep := range f.ds[0].deps {
+ imp = append(imp, string(dep.Ident.ProjectRoot))
+ }
+ for _, dep := range f.ds[0].devdeps {
+ timp = append(timp, string(dep.Ident.ProjectRoot))
+ }
+
+ n := string(f.ds[0].n)
+ pt := PackageTree{
+ ImportRoot: n,
+ Packages: map[string]PackageOrErr{
+ string(n): {
+ P: Package{
+ ImportPath: n,
+ Name: n,
+ Imports: imp,
+ TestImports: timp,
+ },
+ },
+ },
+ }
+
+ return pt
+}
+
func (f basicFixture) failure() error {
return f.fail
}
@@ -667,6 +695,10 @@
"bar 1.0.1",
),
},
+ // This fixture describes a situation that should be impossible with a
+ // real-world VCS (contents of dep at same rev are different, as indicated
+ // by different constraints on bar). But, that's not the SUT here, so it's
+ // OK.
"pairs bare revs in lock with all versions": {
ds: []depspec{
mkDepspec("root 0.0.0", "foo ~1.0.1"),
@@ -682,7 +714,7 @@
),
r: mksolution(
"foo 1.0.2 foorev",
- "bar 1.0.1",
+ "bar 1.0.2",
),
},
"does not pair bare revs in manifest with unpaired lock version": {
@@ -703,6 +735,35 @@
"bar 1.0.1",
),
},
+ "lock to branch on old rev keeps old rev": {
+ ds: []depspec{
+ mkDepspec("root 0.0.0", "foo bmaster"),
+ mkDepspec("foo bmaster newrev"),
+ },
+ l: mklock(
+ "foo bmaster oldrev",
+ ),
+ r: mksolution(
+ "foo bmaster oldrev",
+ ),
+ },
+ // Whereas this is a normal situation for a branch, when it occurs for a
+ // tag, it means someone's been naughty upstream. Still, though, the outcome
+ // is the same.
+ //
+ // TODO(sdboyer) this needs to generate a warning, once we start doing that
+ "lock to now-moved tag on old rev keeps old rev": {
+ ds: []depspec{
+ mkDepspec("root 0.0.0", "foo ptaggerino"),
+ mkDepspec("foo ptaggerino newrev"),
+ },
+ l: mklock(
+ "foo ptaggerino oldrev",
+ ),
+ r: mksolution(
+ "foo ptaggerino oldrev",
+ ),
+ },
"includes root package's dev dependencies": {
ds: []depspec{
mkDepspec("root 1.0.0", "(dev) foo 1.0.0", "(dev) bar 1.0.0"),
@@ -1175,6 +1236,16 @@
}
func (sm *depspecSourceManager) GetManifestAndLock(id ProjectIdentifier, v Version) (Manifest, Lock, error) {
+ // If the input version is a PairedVersion, look only at its top version,
+ // not the underlying. This is generally consistent with the idea that, for
+ // this class of lookup, the rev probably DOES exist, but upstream changed
+ // it (typically a branch). For the purposes of tests, then, that's an OK
+ // scenario, because otherwise we'd have to enumerate all the revs in the
+ // fixture declarations, which would screw up other things.
+ if pv, ok := v.(PairedVersion); ok {
+ v = pv.Unpair()
+ }
+
for _, ds := range sm.specs {
if id.netName() == string(ds.n) && v.Matches(ds.v) {
return ds, dummyLock{}, nil
@@ -1210,7 +1281,7 @@
pid := pident{n: ProjectRoot(id.netName()), v: v}
if r, exists := sm.rm[pid]; exists {
- ptree := PackageTree{
+ return PackageTree{
ImportRoot: string(pid.n),
Packages: map[string]PackageOrErr{
string(pid.n): {
@@ -1221,8 +1292,29 @@
},
},
},
+ }, nil
+ }
+
+ // if incoming version was paired, walk the map and search for a match on
+ // top-only version
+ if pv, ok := v.(PairedVersion); ok {
+ uv := pv.Unpair()
+ for pid, r := range sm.rm {
+ if uv.Matches(pid.v) {
+ return PackageTree{
+ ImportRoot: string(pid.n),
+ Packages: map[string]PackageOrErr{
+ string(pid.n): {
+ P: Package{
+ ImportPath: string(pid.n),
+ Name: string(pid.n),
+ Imports: r[string(pid.n)],
+ },
+ },
+ },
+ }, nil
+ }
}
- return ptree, nil
}
return PackageTree{}, fmt.Errorf("Project %s at version %s could not be found", pid.n, v)
@@ -1272,10 +1364,6 @@
return nil
}
-func (sm *depspecSourceManager) VendorCodeExists(id ProjectIdentifier) (bool, error) {
- return false, nil
-}
-
func (sm *depspecSourceManager) Release() {}
func (sm *depspecSourceManager) ExportProject(id ProjectIdentifier, v Version, to string) error {
@@ -1308,26 +1396,11 @@
*bridge
}
-// override computeRootReach() on bridge to read directly out of the depspecs
-func (b *depspecBridge) computeRootReach() ([]string, error) {
- // This only gets called for the root project, so grab that one off the test
- // source manager
- dsm := b.sm.(fixSM)
- root := dsm.rootSpec()
-
- ptree, err := dsm.ListPackages(mkPI(string(root.n)), nil)
- if err != nil {
- return nil, err
- }
-
- return ptree.ListExternalImports(true, true, dsm.ignore()), nil
-}
-
// override verifyRoot() on bridge to prevent any filesystem interaction
func (b *depspecBridge) verifyRootDir(path string) error {
root := b.sm.(fixSM).rootSpec()
if string(root.n) != path {
- return fmt.Errorf("Expected only root project %q to computeRootReach(), got %q", root.n, path)
+ return fmt.Errorf("Expected only root project %q to verifyRootDir(), got %q", root.n, path)
}
return nil
@@ -1337,6 +1410,10 @@
return b.sm.(fixSM).ListPackages(id, v)
}
+func (sm *depspecBridge) vendorCodeExists(id ProjectIdentifier) (bool, error) {
+ return false, nil
+}
+
// enforce interfaces
var _ Manifest = depspec{}
var _ Lock = dummyLock{}
diff --git a/vendor/github.com/sdboyer/gps/solve_bimodal_test.go b/vendor/github.com/sdboyer/gps/solve_bimodal_test.go
index 9ebe483..f430ad9 100644
--- a/vendor/github.com/sdboyer/gps/solve_bimodal_test.go
+++ b/vendor/github.com/sdboyer/gps/solve_bimodal_test.go
@@ -3,6 +3,7 @@
import (
"fmt"
"path/filepath"
+ "strings"
)
// dsp - "depspec with packages"
@@ -791,6 +792,28 @@
return m
}
+func (f bimodalFixture) rootTree() PackageTree {
+ pt := PackageTree{
+ ImportRoot: string(f.ds[0].n),
+ Packages: map[string]PackageOrErr{},
+ }
+
+ for _, pkg := range f.ds[0].pkgs {
+ elems := strings.Split(pkg.path, "/")
+ pt.Packages[pkg.path] = PackageOrErr{
+ P: Package{
+ ImportPath: pkg.path,
+ Name: elems[len(elems)-1],
+ // TODO(sdboyer) ugh, tpkg type has no space for supporting test
+ // imports...
+ Imports: pkg.imports,
+ },
+ }
+ }
+
+ return pt
+}
+
func (f bimodalFixture) failure() error {
return f.fail
}
diff --git a/vendor/github.com/sdboyer/gps/solve_test.go b/vendor/github.com/sdboyer/gps/solve_test.go
index 53bcdcd..425dd50 100644
--- a/vendor/github.com/sdboyer/gps/solve_test.go
+++ b/vendor/github.com/sdboyer/gps/solve_test.go
@@ -19,7 +19,7 @@
// TODO(sdboyer) regression test ensuring that locks with only revs for projects don't cause errors
func init() {
- flag.StringVar(&fixtorun, "gps.fix", "", "A single fixture to run in TestBasicSolves")
+ flag.StringVar(&fixtorun, "gps.fix", "", "A single fixture to run in TestBasicSolves or TestBimodalSolves")
overrideMkBridge()
}
@@ -87,12 +87,12 @@
sm := newdepspecSM(fix.ds, nil)
params := SolveParameters{
- RootDir: string(fix.ds[0].n),
- ImportRoot: ProjectRoot(fix.ds[0].n),
- Manifest: fix.rootmanifest(),
- Lock: dummyLock{},
- Downgrade: fix.downgrade,
- ChangeAll: fix.changeall,
+ RootDir: string(fix.ds[0].n),
+ RootPackageTree: fix.rootTree(),
+ Manifest: fix.rootmanifest(),
+ Lock: dummyLock{},
+ Downgrade: fix.downgrade,
+ ChangeAll: fix.changeall,
}
if fix.l != nil {
@@ -137,12 +137,12 @@
sm := newbmSM(fix)
params := SolveParameters{
- RootDir: string(fix.ds[0].n),
- ImportRoot: ProjectRoot(fix.ds[0].n),
- Manifest: fix.rootmanifest(),
- Lock: dummyLock{},
- Downgrade: fix.downgrade,
- ChangeAll: fix.changeall,
+ RootDir: string(fix.ds[0].n),
+ RootPackageTree: fix.rootTree(),
+ Manifest: fix.rootmanifest(),
+ Lock: dummyLock{},
+ Downgrade: fix.downgrade,
+ ChangeAll: fix.changeall,
}
if fix.l != nil {
@@ -163,6 +163,13 @@
return fmt.Sprintf("%s (from %s)", id.ProjectRoot, id.NetworkName)
}
+ pv := func(v Version) string {
+ if pv, ok := v.(PairedVersion); ok {
+ return fmt.Sprintf("%s (%s)", pv.Unpair(), pv.Underlying())
+ }
+ return v.String()
+ }
+
fixfail := fix.failure()
if err != nil {
if fixfail == nil {
@@ -207,7 +214,7 @@
// delete result from map so we skip it on the reverse pass
delete(rp, p)
if v != av {
- t.Errorf("(fixture: %q) Expected version %q of project %q, but actual version was %q", fix.name(), v, ppi(p), av)
+ t.Errorf("(fixture: %q) Expected version %q of project %q, but actual version was %q", fix.name(), pv(v), ppi(p), pv(av))
}
}
}
@@ -217,7 +224,7 @@
if fv, exists := fix.solution()[p]; !exists {
t.Errorf("(fixture: %q) Unexpected project %q present in results", fix.name(), ppi(p))
} else if v != fv {
- t.Errorf("(fixture: %q) Got version %q of project %q, but expected version was %q", fix.name(), v, ppi(p), fv)
+ t.Errorf("(fixture: %q) Got version %q of project %q, but expected version was %q", fix.name(), pv(v), ppi(p), pv(fv))
}
}
}
@@ -232,7 +239,7 @@
// produce weird side effects.
func TestRootLockNoVersionPairMatching(t *testing.T) {
fix := basicFixture{
- n: "does not pair bare revs in manifest with unpaired lock version",
+ n: "does not match unpaired lock versions with paired real versions",
ds: []depspec{
mkDepspec("root 0.0.0", "foo *"), // foo's constraint rewritten below to foorev
mkDepspec("foo 1.0.0", "bar 1.0.0"),
@@ -247,7 +254,7 @@
),
r: mksolution(
"foo 1.0.2 foorev",
- "bar 1.0.1",
+ "bar 1.0.2",
),
}
@@ -262,10 +269,10 @@
l2[0].v = nil
params := SolveParameters{
- RootDir: string(fix.ds[0].n),
- ImportRoot: ProjectRoot(fix.ds[0].n),
- Manifest: fix.rootmanifest(),
- Lock: l2,
+ RootDir: string(fix.ds[0].n),
+ RootPackageTree: fix.rootTree(),
+ Manifest: fix.rootmanifest(),
+ Lock: l2,
}
res, err := fixSolve(params, sm)
@@ -303,7 +310,27 @@
t.Error("Prepare should have given error on empty import root, but gave:", err)
}
- params.ImportRoot = ProjectRoot(pn)
+ params.RootPackageTree = PackageTree{
+ ImportRoot: pn,
+ }
+ _, err = Prepare(params, sm)
+ if err == nil {
+ t.Errorf("Prepare should have errored on empty name")
+ } else if !strings.Contains(err.Error(), "at least one package") {
+ t.Error("Prepare should have given error on empty import root, but gave:", err)
+ }
+
+ params.RootPackageTree = PackageTree{
+ ImportRoot: pn,
+ Packages: map[string]PackageOrErr{
+ pn: {
+ P: Package{
+ ImportPath: pn,
+ Name: pn,
+ },
+ },
+ },
+ }
params.Trace = true
_, err = Prepare(params, sm)
if err == nil {
diff --git a/vendor/github.com/sdboyer/gps/solver.go b/vendor/github.com/sdboyer/gps/solver.go
index f7d9a24..8993b78 100644
--- a/vendor/github.com/sdboyer/gps/solver.go
+++ b/vendor/github.com/sdboyer/gps/solver.go
@@ -31,16 +31,15 @@
// A real path to a readable directory is required.
RootDir string
- // The import path at the base of all import paths covered by the project.
- // For example, the appropriate value for gps itself here is:
+ // The tree of packages that comprise the root project, as well as the
+ // import path that should identify the root of that tree.
//
- // github.com/sdboyer/gps
+ // In most situations, tools should simply pass the result of ListPackages()
+ // directly through here.
//
- // In most cases, this should match the latter portion of RootDir. However,
- // that is not (currently) required.
- //
- // A non-empty string is required.
- ImportRoot ProjectRoot
+ // The ImportRoot property must be a non-empty string, and at least one
+ // element must be present in the Packages map.
+ RootPackageTree PackageTree
// The root manifest. This contains all the dependency constraints
// associated with normal Manifests, as well as the particular controls
@@ -157,6 +156,9 @@
// A defensively-copied instance of the root lock.
rl Lock
+
+ // A defensively-copied instance of params.RootPackageTree
+ rpt PackageTree
}
// A Solver is the main workhorse of gps: given a set of project inputs, it
@@ -192,9 +194,12 @@
if params.RootDir == "" {
return nil, badOptsFailure("params must specify a non-empty root directory")
}
- if params.ImportRoot == "" {
+ if params.RootPackageTree.ImportRoot == "" {
return nil, badOptsFailure("params must include a non-empty import root")
}
+ if len(params.RootPackageTree.Packages) == 0 {
+ return nil, badOptsFailure("at least one package must be present in the PackageTree")
+ }
if params.Trace && params.TraceLogger == nil {
return nil, badOptsFailure("trace requested, but no logger provided")
}
@@ -208,6 +213,7 @@
ig: params.Manifest.IgnorePackages(),
ovr: params.Manifest.Overrides(),
tl: params.TraceLogger,
+ rpt: params.RootPackageTree.dup(),
}
// Ensure the ignore and overrides maps are at least initialized
@@ -316,7 +322,7 @@
return soln, err
}
-// solve is the top-level loop for the SAT solving process.
+// solve is the top-level loop for the solving process.
func (s *solver) solve() (map[atom]map[string]struct{}, error) {
// Main solving loop
for {
@@ -331,9 +337,9 @@
// satisfiability and selection paths depending on whether we've already
// selected the base project/repo that came off the unselected queue.
//
- // (If we already have selected the project, other parts of the
- // algorithm guarantee the bmi will contain at least one package from
- // this project that has yet to be selected.)
+ // (If we've already selected the project, other parts of the algorithm
+ // guarantee the bmi will contain at least one package from this project
+ // that has yet to be selected.)
if awp, is := s.sel.selected(bmi.id); !is {
// Analysis path for when we haven't selected the project yet - need
// to create a version queue.
@@ -425,7 +431,7 @@
func (s *solver) selectRoot() error {
pa := atom{
id: ProjectIdentifier{
- ProjectRoot: s.params.ImportRoot,
+ ProjectRoot: ProjectRoot(s.rpt.ImportRoot),
},
// This is a hack so that the root project doesn't have a nil version.
// It's sort of OK because the root never makes it out into the results.
@@ -462,7 +468,7 @@
// Err is not possible at this point, as it could only come from
// listPackages(), which if we're here already succeeded for root
- reach, _ := s.b.computeRootReach()
+ reach := s.rpt.ExternalReach(true, true, s.ig).ListExternalImports()
deps, err := s.intersectConstraintsWithImports(mdeps, reach)
if err != nil {
@@ -472,8 +478,8 @@
for _, dep := range deps {
// If we have no lock, or if this dep isn't in the lock, then prefetch
- // it. See explanation longer comment in selectRoot() for how we benefit
- // from parallelism here.
+ // it. See longer explanation in selectRoot() for how we benefit from
+ // parallelism here.
if _, has := s.rlm[dep.Ident.ProjectRoot]; !has {
go s.b.SyncSourceFor(dep.Ident)
}
@@ -490,7 +496,7 @@
func (s *solver) getImportsAndConstraintsOf(a atomWithPackages) ([]completeDep, error) {
var err error
- if s.params.ImportRoot == a.a.id.ProjectRoot {
+ if ProjectRoot(s.rpt.ImportRoot) == a.a.id.ProjectRoot {
panic("Should never need to recheck imports/constraints from root during solve")
}
@@ -625,7 +631,7 @@
func (s *solver) createVersionQueue(bmi bimodalIdentifier) (*versionQueue, error) {
id := bmi.id
// If on the root package, there's no queue to make
- if s.params.ImportRoot == id.ProjectRoot {
+ if ProjectRoot(s.rpt.ImportRoot) == id.ProjectRoot {
return newVersionQueue(id, nil, nil, s.b)
}
@@ -665,7 +671,7 @@
// TODO(sdboyer) nested loop; prime candidate for a cache somewhere
for _, dep := range s.sel.getDependenciesOn(bmi.id) {
// Skip the root, of course
- if s.params.ImportRoot == dep.depender.id.ProjectRoot {
+ if ProjectRoot(s.rpt.ImportRoot) == dep.depender.id.ProjectRoot {
continue
}
@@ -1023,7 +1029,7 @@
// selection?
// skip if the root project
- if s.params.ImportRoot != id.ProjectRoot {
+ if ProjectRoot(s.rpt.ImportRoot) != id.ProjectRoot {
// just look for the first (oldest) one; the backtracker will necessarily
// traverse through and pop off any earlier ones
for _, vq := range s.vqs {
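
SolveParameters now carries the root's whole package tree instead of a bare ImportRoot string, and Prepare rejects a tree with an empty ImportRoot or no packages. A fragment in the spirit of example.go above, assuming its NaiveAnalyzer plus root/importroot values are already in scope; gps.Prepare, Solve, and Solution.Projects are taken from gps' existing public surface, which this diff doesn't alter, so treat this as a sketch of the new wiring rather than code from the commit:

	// The old ImportRoot field is gone; the PackageTree from ListPackages
	// supplies both the import root string and the parsed packages.
	params := gps.SolveParameters{
		RootDir:     root,
		Trace:       true,
		TraceLogger: log.New(os.Stdout, "", 0),
	}
	params.RootPackageTree, _ = gps.ListPackages(root, importroot)

	sourcemgr, _ := gps.NewSourceManager(NaiveAnalyzer{}, ".repocache", false)
	defer sourcemgr.Release()

	// Prepare fails fast if the tree names no import root or holds no packages.
	solver, err := gps.Prepare(params, sourcemgr)
	if err != nil {
		log.Fatal(err)
	}
	solution, err := solver.Solve()
	if err != nil {
		log.Fatal(err)
	}
	// A Solution is also a Lock; Projects() reports what was selected.
	fmt.Println(len(solution.Projects()))
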
diff --git a/vendor/github.com/sdboyer/gps/source.go b/vendor/github.com/sdboyer/gps/source.go
index 6256c51..75265d9 100644
--- a/vendor/github.com/sdboyer/gps/source.go
+++ b/vendor/github.com/sdboyer/gps/source.go
@@ -335,7 +335,7 @@
err = bs.crepo.r.UpdateVersion(v.String())
}
- ptree, err = listPackages(bs.crepo.r.LocalPath(), string(pr))
+ ptree, err = ListPackages(bs.crepo.r.LocalPath(), string(pr))
bs.crepo.mut.Unlock()
// TODO(sdboyer) cache errs?
diff --git a/vendor/github.com/sdboyer/gps/trace.go b/vendor/github.com/sdboyer/gps/trace.go
index 4c20279..e08dcf7 100644
--- a/vendor/github.com/sdboyer/gps/trace.go
+++ b/vendor/github.com/sdboyer/gps/trace.go
@@ -109,7 +109,7 @@
// so who cares
rm := ptree.ExternalReach(true, true, s.ig)
- s.tl.Printf("Root project is %q", s.params.ImportRoot)
+ s.tl.Printf("Root project is %q", s.rpt.ImportRoot)
var expkgs int
for _, cdep := range cdeps {
diff --git a/vendor/github.com/sdboyer/gps/vcs_source.go b/vendor/github.com/sdboyer/gps/vcs_source.go
index 277b1db..ecded0c 100644
--- a/vendor/github.com/sdboyer/gps/vcs_source.go
+++ b/vendor/github.com/sdboyer/gps/vcs_source.go
@@ -54,9 +54,10 @@
if rv, ok := v.(PairedVersion); ok {
vstr = rv.Underlying().String()
}
- _, err = r.RunFromDir("git", "read-tree", vstr)
+
+ out, err := r.RunFromDir("git", "read-tree", vstr)
if err != nil {
- return err
+ return fmt.Errorf("%s: %s", out, err)
}
// Ensure we have exactly one trailing slash
@@ -68,8 +69,11 @@
// the alternative is using plain checkout, though we have a bunch of
// housekeeping to do to set up, then tear down, the sparse checkout
// controls, as well as restore the original index and HEAD.
- _, err = r.RunFromDir("git", "checkout-index", "-a", "--prefix="+to)
- return err
+ out, err = r.RunFromDir("git", "checkout-index", "-a", "--prefix="+to)
+ if err != nil {
+ return fmt.Errorf("%s: %s", out, err)
+ }
+ return nil
}
func (s *gitSource) listVersions() (vlist []Version, err error) {
diff --git a/vendor/github.com/sdboyer/gps/version.go b/vendor/github.com/sdboyer/gps/version.go
index 1e15029..f288b2a 100644
--- a/vendor/github.com/sdboyer/gps/version.go
+++ b/vendor/github.com/sdboyer/gps/version.go
@@ -411,11 +411,7 @@
}
switch tv := v.v.(type) {
- case plainVersion:
- if tv.Matches(v2) {
- return true
- }
- case branchVersion:
+ case plainVersion, branchVersion:
if tv.Matches(v2) {
return true
}
@@ -491,22 +487,23 @@
case branchVersion, plainVersion, semVersion:
return 1
}
- case branchVersion:
- switch r.(type) {
- case Revision:
- return -1
- case branchVersion:
- return 0
- case plainVersion, semVersion:
- return 1
- }
case plainVersion:
switch r.(type) {
- case Revision, branchVersion:
+ case Revision:
return -1
case plainVersion:
return 0
+ case branchVersion, semVersion:
+ return 1
+ }
+
+ case branchVersion:
+ switch r.(type) {
+ case Revision, plainVersion:
+ return -1
+ case branchVersion:
+ return 0
case semVersion:
return 1
}
@@ -531,9 +528,9 @@
// - Semver versions with a prerelease are after *all* non-prerelease semver.
// Against each other, they are sorted first by their numerical component, then
// lexicographically by their prerelease version.
+// - All branches are next, and sort lexicographically against each other.
// - All non-semver versions (tags) are next, and sort lexicographically
// against each other.
-// - All branches are next, and sort lexicographically against each other.
// - Revisions are last, and sort lexicographically against each other.
//
// So, given a slice of the following versions:
@@ -553,14 +550,15 @@
// SortForDowngrade sorts a slice of []Version in roughly ascending order, so
// that presumably older versions are visited first.
//
-// This is *not* the reverse of the same as SortForUpgrade (or you could simply
-// sort.Reverse(). The type precedence is the same, including the
-// semver vs. semver-with-prerelease relation. Lexicographic comparisons within
-// non-semver tags, branches, and revisions remains the same as well; because
-// these domains have no implicit chronology, there is no reason to reverse
+// This is *not* the same as reversing SortForUpgrade (or you could simply
+// sort.Reverse()). The type precedence is the same, including the semver vs.
+// semver-with-prerelease relation. Lexicographic comparisons within non-semver
+// tags, branches, and revisions remain the same as well; because we treat
+// these domains as having no ordering relations (chronology), there can be no
+// real concept of "upgrade" vs "downgrade", so there is no reason to reverse
// them.
//
-// The only binary relation that is reversed for downgrade is within-type
+// Thus, the only binary relation that is reversed for downgrade is within-type
// comparisons for semver (with and without prerelease).
//
// So, given a slice of the following versions:
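
The compare/sort rework above moves branches ahead of plain, non-semver tags in the type precedence for both sort directions. A tiny illustration; NewVersion, NewBranch, Revision, and SortForUpgrade come from gps' existing public API (not shown in this diff), so their exact shapes are assumed:

	vl := []gps.Version{
		gps.NewVersion("onetag"), // plain, non-semver tag
		gps.Revision("f6e74e8d"), // bare revision
		gps.NewBranch("master"),  // branch
		gps.NewVersion("v1.0.0"), // semver tag
	}
	gps.SortForUpgrade(vl)
	// With this change the order is: v1.0.0, master, onetag, f6e74e8d.
	// Before it, the plain tag "onetag" sorted ahead of the branch "master".
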
diff --git a/vendor/github.com/sdboyer/gps/version_test.go b/vendor/github.com/sdboyer/gps/version_test.go
index 436dbe4..394bb27 100644
--- a/vendor/github.com/sdboyer/gps/version_test.go
+++ b/vendor/github.com/sdboyer/gps/version_test.go
@@ -32,15 +32,15 @@
edown := []Version{
v3, v4, v5, // semvers
- v6, v8, // plain versions
v1, v2, v7, // floating/branches
+ v6, v8, // plain versions
rev, // revs
}
eup := []Version{
v5, v4, v3, // semvers
- v6, v8, // plain versions
v1, v2, v7, // floating/branches
+ v6, v8, // plain versions
rev, // revs
}
diff --git a/vendor/gopkg.in/yaml.v2/LICENSE b/vendor/gopkg.in/yaml.v2/LICENSE
index a68e67f..866d74a 100644
--- a/vendor/gopkg.in/yaml.v2/LICENSE
+++ b/vendor/gopkg.in/yaml.v2/LICENSE
@@ -1,188 +1,13 @@
+Copyright 2011-2016 Canonical Ltd.
-Copyright (c) 2011-2014 - Canonical Inc.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
-This software is licensed under the LGPLv3, included below.
+ http://www.apache.org/licenses/LICENSE-2.0
-As a special exception to the GNU Lesser General Public License version 3
-("LGPL3"), the copyright holders of this Library give you permission to
-convey to a third party a Combined Work that links statically or dynamically
-to this Library without providing any Minimal Corresponding Source or
-Minimal Application Code as set out in 4d or providing the installation
-information set out in section 4e, provided that you comply with the other
-provisions of LGPL3 and provided that you meet, for the Application the
-terms and conditions of the license(s) which apply to the Application.
-
-Except as stated in this special exception, the provisions of LGPL3 will
-continue to comply in full to this Library. If you modify this Library, you
-may apply this exception to your version of this Library, but you are not
-obliged to do so. If you do not wish to do so, delete this exception
-statement from your version. This exception does not (and cannot) modify any
-license terms which apply to the Application, with which you must still
-comply.
-
-
- GNU LESSER GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
-
- This version of the GNU Lesser General Public License incorporates
-the terms and conditions of version 3 of the GNU General Public
-License, supplemented by the additional permissions listed below.
-
- 0. Additional Definitions.
-
- As used herein, "this License" refers to version 3 of the GNU Lesser
-General Public License, and the "GNU GPL" refers to version 3 of the GNU
-General Public License.
-
- "The Library" refers to a covered work governed by this License,
-other than an Application or a Combined Work as defined below.
-
- An "Application" is any work that makes use of an interface provided
-by the Library, but which is not otherwise based on the Library.
-Defining a subclass of a class defined by the Library is deemed a mode
-of using an interface provided by the Library.
-
- A "Combined Work" is a work produced by combining or linking an
-Application with the Library. The particular version of the Library
-with which the Combined Work was made is also called the "Linked
-Version".
-
- The "Minimal Corresponding Source" for a Combined Work means the
-Corresponding Source for the Combined Work, excluding any source code
-for portions of the Combined Work that, considered in isolation, are
-based on the Application, and not on the Linked Version.
-
- The "Corresponding Application Code" for a Combined Work means the
-object code and/or source code for the Application, including any data
-and utility programs needed for reproducing the Combined Work from the
-Application, but excluding the System Libraries of the Combined Work.
-
- 1. Exception to Section 3 of the GNU GPL.
-
- You may convey a covered work under sections 3 and 4 of this License
-without being bound by section 3 of the GNU GPL.
-
- 2. Conveying Modified Versions.
-
- If you modify a copy of the Library, and, in your modifications, a
-facility refers to a function or data to be supplied by an Application
-that uses the facility (other than as an argument passed when the
-facility is invoked), then you may convey a copy of the modified
-version:
-
- a) under this License, provided that you make a good faith effort to
- ensure that, in the event an Application does not supply the
- function or data, the facility still operates, and performs
- whatever part of its purpose remains meaningful, or
-
- b) under the GNU GPL, with none of the additional permissions of
- this License applicable to that copy.
-
- 3. Object Code Incorporating Material from Library Header Files.
-
- The object code form of an Application may incorporate material from
-a header file that is part of the Library. You may convey such object
-code under terms of your choice, provided that, if the incorporated
-material is not limited to numerical parameters, data structure
-layouts and accessors, or small macros, inline functions and templates
-(ten or fewer lines in length), you do both of the following:
-
- a) Give prominent notice with each copy of the object code that the
- Library is used in it and that the Library and its use are
- covered by this License.
-
- b) Accompany the object code with a copy of the GNU GPL and this license
- document.
-
- 4. Combined Works.
-
- You may convey a Combined Work under terms of your choice that,
-taken together, effectively do not restrict modification of the
-portions of the Library contained in the Combined Work and reverse
-engineering for debugging such modifications, if you also do each of
-the following:
-
- a) Give prominent notice with each copy of the Combined Work that
- the Library is used in it and that the Library and its use are
- covered by this License.
-
- b) Accompany the Combined Work with a copy of the GNU GPL and this license
- document.
-
- c) For a Combined Work that displays copyright notices during
- execution, include the copyright notice for the Library among
- these notices, as well as a reference directing the user to the
- copies of the GNU GPL and this license document.
-
- d) Do one of the following:
-
- 0) Convey the Minimal Corresponding Source under the terms of this
- License, and the Corresponding Application Code in a form
- suitable for, and under terms that permit, the user to
- recombine or relink the Application with a modified version of
- the Linked Version to produce a modified Combined Work, in the
- manner specified by section 6 of the GNU GPL for conveying
- Corresponding Source.
-
- 1) Use a suitable shared library mechanism for linking with the
- Library. A suitable mechanism is one that (a) uses at run time
- a copy of the Library already present on the user's computer
- system, and (b) will operate properly with a modified version
- of the Library that is interface-compatible with the Linked
- Version.
-
- e) Provide Installation Information, but only if you would otherwise
- be required to provide such information under section 6 of the
- GNU GPL, and only to the extent that such information is
- necessary to install and execute a modified version of the
- Combined Work produced by recombining or relinking the
- Application with a modified version of the Linked Version. (If
- you use option 4d0, the Installation Information must accompany
- the Minimal Corresponding Source and Corresponding Application
- Code. If you use option 4d1, you must provide the Installation
- Information in the manner specified by section 6 of the GNU GPL
- for conveying Corresponding Source.)
-
- 5. Combined Libraries.
-
- You may place library facilities that are a work based on the
-Library side by side in a single library together with other library
-facilities that are not Applications and are not covered by this
-License, and convey such a combined library under terms of your
-choice, if you do both of the following:
-
- a) Accompany the combined library with a copy of the same work based
- on the Library, uncombined with any other library facilities,
- conveyed under the terms of this License.
-
- b) Give prominent notice with the combined library that part of it
- is a work based on the Library, and explaining where to find the
- accompanying uncombined form of the same work.
-
- 6. Revised Versions of the GNU Lesser General Public License.
-
- The Free Software Foundation may publish revised and/or new versions
-of the GNU Lesser General Public License from time to time. Such new
-versions will be similar in spirit to the present version, but may
-differ in detail to address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Library as you received it specifies that a certain numbered version
-of the GNU Lesser General Public License "or any later version"
-applies to it, you have the option of following the terms and
-conditions either of that published version or of any later version
-published by the Free Software Foundation. If the Library as you
-received it does not specify a version number of the GNU Lesser
-General Public License, you may choose any version of the GNU Lesser
-General Public License ever published by the Free Software Foundation.
-
- If the Library as you received it specifies that a proxy can decide
-whether future versions of the GNU Lesser General Public License shall
-apply, that proxy's public statement of acceptance of any version is
-permanent authorization for you to choose that version for the
-Library.
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/vendor/gopkg.in/yaml.v2/README.md b/vendor/gopkg.in/yaml.v2/README.md
index 7b8bd86..1884de6 100644
--- a/vendor/gopkg.in/yaml.v2/README.md
+++ b/vendor/gopkg.in/yaml.v2/README.md
@@ -42,7 +42,7 @@
License
-------
-The yaml package is licensed under the LGPL with an exception that allows it to be linked statically. Please see the LICENSE file for details.
+The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.
Example