Chase gps to v0.11.1
There still seems to be a hiccup or two, but we're nearly back to working
order.
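
For anyone chasing the same bump: the consumer-facing changes in gps v0.11.x
that this patch adapts to are (1) the root project is now scanned up front with
ListPackages and handed to the solver as RootPackageTree, replacing ImportRoot,
(2) NewSourceManager drops the force flag and reports lock contention as a
CouldNotCreateLockError, and (3) HashInputs no longer returns an error. Below is
a minimal sketch of that wiring, assuming a placeholder import path, analyzer,
and manifest rather than glide's actual code (glide plugs in
dependency.Analyzer{} and its Config as the Manifest):

    package main

    import (
        "log"
        "os"
        "path/filepath"

        "github.com/sdboyer/gps"
    )

    func main() {
        root, _ := os.Getwd()
        importRoot := "github.com/example/project" // placeholder import path

        // The root project is scanned up front; the resulting tree is fed to
        // the solver via RootPackageTree (replacing the old ImportRoot field).
        rt, err := gps.ListPackages(root, importRoot)
        if err != nil {
            log.Fatalf("error while scanning project: %s", err)
        }

        // A real tool supplies its own gps.ProjectAnalyzer (glide uses
        // dependency.Analyzer{}); left nil here purely for illustration.
        var an gps.ProjectAnalyzer

        // NewSourceManager no longer takes a force flag; lock contention now
        // surfaces as a CouldNotCreateLockError instead.
        sm, err := gps.NewSourceManager(an, filepath.Join(root, ".repocache"))
        if err != nil {
            if _, locked := err.(gps.CouldNotCreateLockError); locked {
                log.Fatal("another process holds the gps cache lock")
            }
            log.Fatal(err)
        }
        defer sm.Release()

        params := gps.SolveParameters{
            RootDir:         root,
            RootPackageTree: rt,
            Manifest:        nil, // a real tool passes its gps.Manifest here
            Trace:           true,
            TraceLogger:     log.New(os.Stdout, "", 0),
        }

        s, err := gps.Prepare(params, sm)
        if err != nil {
            log.Fatalf("could not set up solver: %s", err)
        }

        // HashInputs can no longer fail, since the root tree was parsed above;
        // compare the digest against an existing lock's InputHash() to decide
        // whether a re-solve is needed.
        digest := s.HashInputs()
        _ = digest

        solution, err := s.Solve()
        if err != nil {
            log.Fatalf("solve failed: %s", err)
        }
        _ = solution
    }
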
diff --git a/action/get.go b/action/get.go
index 8a3a979..785dc28 100644
--- a/action/get.go
+++ b/action/get.go
@@ -37,12 +37,18 @@
msg.Die("Could not find the vendor dir: %s", err)
}
+ rd := filepath.Dir(glidefile)
+ rt, err := gps.ListPackages(rd, conf.Name)
+ if err != nil {
+ msg.Die("Error while scanning project: %s", err)
+ }
+
params := gps.SolveParameters{
- RootDir: filepath.Dir(glidefile),
- ImportRoot: gps.ProjectRoot(conf.Name),
- Manifest: conf,
- Trace: true,
- TraceLogger: log.New(os.Stdout, "", 0),
+ RootDir: rd,
+ RootPackageTree: rt,
+ Manifest: conf,
+ Trace: true,
+ TraceLogger: log.New(os.Stdout, "", 0),
}
// We load the lock file early and bail out if there's a problem, because we
@@ -57,7 +63,7 @@
}
// Create the SourceManager for this run
- sm, err := gps.NewSourceManager(dependency.Analyzer{}, filepath.Join(installer.Home, "cache"), false)
+ sm, err := gps.NewSourceManager(dependency.Analyzer{}, filepath.Join(installer.Home, "cache"))
defer sm.Release()
if err != nil {
msg.Err(err.Error())
diff --git a/action/install.go b/action/install.go
index a719cc0..0ce5797 100644
--- a/action/install.go
+++ b/action/install.go
@@ -31,19 +31,25 @@
}
// Create the SourceManager for this run
- sm, err := gps.NewSourceManager(dependency.Analyzer{}, filepath.Join(installer.Home, "cache"), false)
+ sm, err := gps.NewSourceManager(dependency.Analyzer{}, filepath.Join(installer.Home, "cache"))
defer sm.Release()
if err != nil {
msg.Err(err.Error())
return
}
+ rd := filepath.Dir(vend)
+ rt, err := gps.ListPackages(rd, conf.Name)
+ if err != nil {
+ msg.Die("Error while scanning project: %s", err)
+ }
+
params := gps.SolveParameters{
- RootDir: filepath.Dir(vend),
- ImportRoot: gps.ProjectRoot(conf.Name),
- Manifest: conf,
- Trace: true,
- TraceLogger: log.New(os.Stdout, "", 0),
+ RootDir: rd,
+ RootPackageTree: rt,
+ Manifest: conf,
+ Trace: true,
+ TraceLogger: log.New(os.Stdout, "", 0),
}
var s gps.Solver
@@ -63,7 +69,7 @@
msg.Err("Could not set up solver: %s", err)
return
}
- digest, err := s.HashInputs()
+ digest := s.HashInputs()
// Check if digests match, and warn if they don't
if bytes.Equal(digest, params.Lock.InputHash()) {
diff --git a/action/update.go b/action/update.go
index 3221258..324b5a3 100644
--- a/action/update.go
+++ b/action/update.go
@@ -30,12 +30,18 @@
msg.Die("Could not find the vendor dir: %s", err)
}
+ rd := filepath.Dir(vend)
+ rt, err := gps.ListPackages(rd, conf.Name)
+ if err != nil {
+ msg.Die("Error while scanning project: %s", err)
+ }
+
params := gps.SolveParameters{
- RootDir: filepath.Dir(vend),
- ImportRoot: gps.ProjectRoot(conf.Name),
- Manifest: conf,
- Trace: true,
- TraceLogger: log.New(os.Stdout, "", 0),
+ RootDir: rd,
+ RootPackageTree: rt,
+ Manifest: conf,
+ Trace: true,
+ TraceLogger: log.New(os.Stdout, "", 0),
}
if len(projs) == 0 {
@@ -59,7 +65,7 @@
}
// Create the SourceManager for this run
- sm, err := gps.NewSourceManager(dependency.Analyzer{}, filepath.Join(installer.Home, "cache"), false)
+ sm, err := gps.NewSourceManager(dependency.Analyzer{}, filepath.Join(installer.Home, "cache"))
if err != nil {
msg.Err(err.Error())
return
diff --git a/glide.lock b/glide.lock
index 04611c5..bd87515 100644
--- a/glide.lock
+++ b/glide.lock
@@ -14,8 +14,8 @@
version: v1.8.0
revision: fbe9fb6ad5b5f35b3e82a7c21123cfc526cbf895
- name: github.com/sdboyer/gps
- branch: master
- revision: 166f36103aedbb9d835b9b6dcc7762a6bd900a98
+ version: v0.11.1
+ revision: 507f709c7eb2429371a22cff9f6077630827e450
- name: github.com/termie/go-shutil
revision: bcacb06fecaeec8dc42af03c87c6949f4a05c74c
- name: gopkg.in/yaml.v2
diff --git a/glide.yaml b/glide.yaml
index 2bd32d8..dcc78aa 100644
--- a/glide.yaml
+++ b/glide.yaml
@@ -18,4 +18,4 @@
- package: github.com/Masterminds/semver
branch: 2.x
- package: github.com/sdboyer/gps
- branch: master
+ version: ^0.11.0
diff --git a/vendor/github.com/sdboyer/gps/README.md b/vendor/github.com/sdboyer/gps/README.md
index 381c2a8..0cb902b 100644
--- a/vendor/github.com/sdboyer/gps/README.md
+++ b/vendor/github.com/sdboyer/gps/README.md
@@ -66,16 +66,16 @@
* A [**manifest** and
**lock**](https://github.com/sdboyer/gps/wiki/gps-for-Implementors#manifests-and-locks)
approach to tracking version and constraint information
-* Source repositories can be `git`, `bzr`, `hg` or `svn` (Most of the work here is through a [separate lib](https://github.com/Masterminds/vcs))
+* Upstream sources are one of `git`, `bzr`, `hg` or `svn` repositories
* What the available versions are for a given project/repository (all branches, tags, or revs are eligible)
- * In general, semver tags are preferred to plain tags, are preferred to branches
-* The actual packages required (determined through import graph static analysis)
+ * In general, semver tags are preferred to branches, which are preferred to plain tags
+* The actual packages that must be present (determined through import graph static analysis)
* How the import graph is statically analyzed (Similar to `go/build`, but with a combinatorial view of build tags)
+* All packages from the same source (repository) must be the same version
* Package import cycles are not allowed ([not yet implemented](https://github.com/sdboyer/gps/issues/66))
There are also some current non-choices that we would like to push into the realm of choice:
-* Different versions of packages from the same repository cannot be used
* Importable projects that are not bound to the repository root
* Source inference around different import path patterns (e.g., how `github.com/*` or `my_company/*` are handled)
diff --git a/vendor/github.com/sdboyer/gps/example.go b/vendor/github.com/sdboyer/gps/example.go
index c3a827a..728439f 100644
--- a/vendor/github.com/sdboyer/gps/example.go
+++ b/vendor/github.com/sdboyer/gps/example.go
@@ -38,7 +38,7 @@
params.RootPackageTree, _ = gps.ListPackages(root, importroot)
// Set up a SourceManager with the NaiveAnalyzer
- sourcemgr, _ := gps.NewSourceManager(NaiveAnalyzer{}, ".repocache", false)
+ sourcemgr, _ := gps.NewSourceManager(NaiveAnalyzer{}, ".repocache")
defer sourcemgr.Release()
// Prep and run the solver
diff --git a/vendor/github.com/sdboyer/gps/hash.go b/vendor/github.com/sdboyer/gps/hash.go
index ca9c9a2..acede5c 100644
--- a/vendor/github.com/sdboyer/gps/hash.go
+++ b/vendor/github.com/sdboyer/gps/hash.go
@@ -2,7 +2,6 @@
import (
"crypto/sha256"
- "fmt"
"sort"
)
@@ -16,15 +15,7 @@
// unnecessary.
//
// (Basically, this is for memoization.)
-func (s *solver) HashInputs() ([]byte, error) {
- // Do these checks up front before any other work is needed, as they're the
- // only things that can cause errors
- // Pass in magic root values, and the bridge will analyze the right thing
- ptree, err := s.b.ListPackages(ProjectIdentifier{ProjectRoot: ProjectRoot(s.params.RootPackageTree.ImportRoot)}, nil)
- if err != nil {
- return nil, badOptsFailure(fmt.Sprintf("Error while parsing packages under %s: %s", s.params.RootDir, err.Error()))
- }
-
+func (s *solver) HashInputs() []byte {
c, tc := s.rm.DependencyConstraints(), s.rm.TestDependencyConstraints()
// Apply overrides to the constraints from the root. Otherwise, the hash
// would be computed on the basis of a constraint from root that doesn't
@@ -51,7 +42,7 @@
// Write each of the packages, or the errors that were found for a
// particular subpath, into the hash.
- for _, perr := range ptree.Packages {
+ for _, perr := range s.rpt.Packages {
if perr.Err != nil {
h.Write([]byte(perr.Err.Error()))
} else {
@@ -97,5 +88,5 @@
h.Write([]byte(an))
h.Write([]byte(av.String()))
- return h.Sum(nil), nil
+ return h.Sum(nil)
}
diff --git a/vendor/github.com/sdboyer/gps/hash_test.go b/vendor/github.com/sdboyer/gps/hash_test.go
index 51732ca..a257252 100644
--- a/vendor/github.com/sdboyer/gps/hash_test.go
+++ b/vendor/github.com/sdboyer/gps/hash_test.go
@@ -16,12 +16,12 @@
}
s, err := Prepare(params, newdepspecSM(fix.ds, nil))
-
- dig, err := s.HashInputs()
if err != nil {
- t.Fatalf("HashInputs returned unexpected err: %s", err)
+ t.Errorf("Unexpected error while prepping solver: %s", err)
+ t.FailNow()
}
+ dig := s.HashInputs()
h := sha256.New()
elems := []string{
@@ -64,12 +64,12 @@
}
s, err := Prepare(params, newdepspecSM(fix.ds, nil))
-
- dig, err := s.HashInputs()
if err != nil {
- t.Fatalf("HashInputs returned unexpected err: %s", err)
+ t.Errorf("Unexpected error while prepping solver: %s", err)
+ t.FailNow()
}
+ dig := s.HashInputs()
h := sha256.New()
elems := []string{
@@ -116,12 +116,12 @@
}
s, err := Prepare(params, newdepspecSM(fix.ds, nil))
-
- dig, err := s.HashInputs()
if err != nil {
- t.Fatalf("HashInputs returned unexpected err: %s", err)
+ t.Errorf("Unexpected error while prepping solver: %s", err)
+ t.FailNow()
}
+ dig := s.HashInputs()
h := sha256.New()
elems := []string{
@@ -154,11 +154,7 @@
rm.ovr["d"] = ProjectProperties{
Constraint: NewBranch("foobranch"),
}
- dig, err = s.HashInputs()
- if err != nil {
- t.Fatalf("HashInputs returned unexpected err: %s", err)
- }
-
+ dig = s.HashInputs()
h = sha256.New()
elems = []string{
@@ -194,11 +190,7 @@
NetworkName: "groucho",
Constraint: NewBranch("plexiglass"),
}
- dig, err = s.HashInputs()
- if err != nil {
- t.Fatalf("HashInputs returned unexpected err: %s", err)
- }
-
+ dig = s.HashInputs()
h = sha256.New()
elems = []string{
@@ -236,11 +228,7 @@
rm.ovr["a"] = ProjectProperties{
Constraint: NewVersion("fluglehorn"),
}
- dig, err = s.HashInputs()
- if err != nil {
- t.Fatalf("HashInputs returned unexpected err: %s", err)
- }
-
+ dig = s.HashInputs()
h = sha256.New()
elems = []string{
@@ -280,11 +268,7 @@
rm.ovr["a"] = ProjectProperties{
NetworkName: "nota",
}
- dig, err = s.HashInputs()
- if err != nil {
- t.Fatalf("HashInputs returned unexpected err: %s", err)
- }
-
+ dig = s.HashInputs()
h = sha256.New()
elems = []string{
@@ -326,11 +310,7 @@
NetworkName: "nota",
Constraint: NewVersion("fluglehorn"),
}
- dig, err = s.HashInputs()
- if err != nil {
- t.Fatalf("HashInputs returned unexpected err: %s", err)
- }
-
+ dig = s.HashInputs()
h = sha256.New()
elems = []string{
diff --git a/vendor/github.com/sdboyer/gps/manager_test.go b/vendor/github.com/sdboyer/gps/manager_test.go
index f3892d6..0daaef9 100644
--- a/vendor/github.com/sdboyer/gps/manager_test.go
+++ b/vendor/github.com/sdboyer/gps/manager_test.go
@@ -44,7 +44,7 @@
t.FailNow()
}
- sm, err := NewSourceManager(naiveAnalyzer{}, cpath, false)
+ sm, err := NewSourceManager(naiveAnalyzer{}, cpath)
if err != nil {
t.Errorf("Unexpected error on SourceManager creation: %s", err)
t.FailNow()
@@ -69,32 +69,45 @@
if err != nil {
t.Errorf("Failed to create temp dir: %s", err)
}
- _, err = NewSourceManager(naiveAnalyzer{}, cpath, false)
+ sm, err := NewSourceManager(naiveAnalyzer{}, cpath)
if err != nil {
t.Errorf("Unexpected error on SourceManager creation: %s", err)
}
- defer func() {
- err := removeAll(cpath)
- if err != nil {
- t.Errorf("removeAll failed: %s", err)
- }
- }()
- _, err = NewSourceManager(naiveAnalyzer{}, cpath, false)
+ _, err = NewSourceManager(naiveAnalyzer{}, cpath)
if err == nil {
t.Errorf("Creating second SourceManager should have failed due to file lock contention")
- }
-
- sm, err := NewSourceManager(naiveAnalyzer{}, cpath, true)
- defer sm.Release()
- if err != nil {
- t.Errorf("Creating second SourceManager should have succeeded when force flag was passed, but failed with err %s", err)
+ } else if te, ok := err.(CouldNotCreateLockError); !ok {
+ t.Errorf("Should have gotten CouldNotCreateLockError error type, but got %T", te)
}
if _, err = os.Stat(path.Join(cpath, "sm.lock")); err != nil {
t.Errorf("Global cache lock file not created correctly")
}
+
+ sm.Release()
+ err = removeAll(cpath)
+ if err != nil {
+ t.Errorf("removeAll failed: %s", err)
+ }
+
+ if _, err = os.Stat(path.Join(cpath, "sm.lock")); !os.IsNotExist(err) {
+ t.Errorf("Global cache lock file not cleared correctly on Release()")
+ t.FailNow()
+ }
+
+ // Set another one up at the same spot now, just to be sure
+ sm, err = NewSourceManager(naiveAnalyzer{}, cpath)
+ if err != nil {
+ t.Errorf("Creating a second SourceManager should have succeeded when the first was released, but failed with err %s", err)
+ }
+
+ sm.Release()
+ err = removeAll(cpath)
+ if err != nil {
+ t.Errorf("removeAll failed: %s", err)
+ }
}
func TestSourceInit(t *testing.T) {
@@ -109,7 +122,7 @@
t.FailNow()
}
- sm, err := NewSourceManager(naiveAnalyzer{}, cpath, false)
+ sm, err := NewSourceManager(naiveAnalyzer{}, cpath)
if err != nil {
t.Errorf("Unexpected error on SourceManager creation: %s", err)
t.FailNow()
@@ -144,10 +157,17 @@
SortForUpgrade(v)
for k, e := range expected {
- if v[k] != e {
+ if !v[k].Matches(e) {
t.Errorf("Expected version %s in position %v but got %s", e, k, v[k])
}
}
+
+ if !v[1].(versionPair).v.(branchVersion).isDefault {
+ t.Error("Expected master branch version to have isDefault flag, but it did not")
+ }
+ if v[2].(versionPair).v.(branchVersion).isDefault {
+ t.Error("Expected test branch version not to have isDefault flag, but it did")
+ }
}
// Two birds, one stone - make sure the internal ProjectManager vlist cache
@@ -175,10 +195,17 @@
}
for k, e := range expected {
- if v[k] != e {
+ if !v[k].Matches(e) {
t.Errorf("Expected version %s in position %v but got %s", e, k, v[k])
}
}
+
+ if !v[1].(versionPair).v.(branchVersion).isDefault {
+ t.Error("Expected master branch version to have isDefault flag, but it did not")
+ }
+ if v[2].(versionPair).v.(branchVersion).isDefault {
+ t.Error("Expected test branch version not to have isDefault flag, but it did")
+ }
}
present, err := smc.RevisionPresentIn(id, rev)
@@ -217,6 +244,51 @@
}
}
+func TestDefaultBranchAssignment(t *testing.T) {
+ if testing.Short() {
+ t.Skip("Skipping default branch assignment test in short mode")
+ }
+
+ sm, clean := mkNaiveSM(t)
+ defer clean()
+
+ id := mkPI("github.com/sdboyer/test-multibranch")
+ v, err := sm.ListVersions(id)
+ if err != nil {
+ t.Errorf("Unexpected error during initial project setup/fetching %s", err)
+ }
+
+ if len(v) != 3 {
+ t.Errorf("Expected three version results from the test repo, got %v", len(v))
+ } else {
+ brev := Revision("fda020843ac81352004b9dca3fcccdd517600149")
+ mrev := Revision("9f9c3a591773d9b28128309ac7a9a72abcab267d")
+ expected := []Version{
+ NewBranch("branchone").Is(brev),
+ NewBranch("otherbranch").Is(brev),
+ NewBranch("master").Is(mrev),
+ }
+
+ SortForUpgrade(v)
+
+ for k, e := range expected {
+ if !v[k].Matches(e) {
+ t.Errorf("Expected version %s in position %v but got %s", e, k, v[k])
+ }
+ }
+
+ if !v[0].(versionPair).v.(branchVersion).isDefault {
+ t.Error("Expected branchone branch version to have isDefault flag, but it did not")
+ }
+ if !v[1].(versionPair).v.(branchVersion).isDefault {
+ t.Error("Expected otherbranch branch version to have isDefault flag, but it did not")
+ }
+ if v[2].(versionPair).v.(branchVersion).isDefault {
+ t.Error("Expected master branch version not to have isDefault flag, but it did")
+ }
+ }
+}
+
func TestMgrMethodsFailWithBadPath(t *testing.T) {
// a symbol will always bork it up
bad := mkPI("foo/##&^").normalize()
diff --git a/vendor/github.com/sdboyer/gps/remove_go16.go b/vendor/github.com/sdboyer/gps/remove_go16.go
index 8c7844d..a25ea2f 100644
--- a/vendor/github.com/sdboyer/gps/remove_go16.go
+++ b/vendor/github.com/sdboyer/gps/remove_go16.go
@@ -24,8 +24,8 @@
}
// make sure all files are writable so we can delete them
- return filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
- if err != nil {
+ err = filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
+ if err != nil && err != filepath.SkipDir {
// walk gave us some error, give it back.
return err
}
@@ -33,6 +33,12 @@
if mode|0200 == mode {
return nil
}
+
return os.Chmod(path, mode|0200)
})
+ if err != nil {
+ return err
+ }
+
+ return os.Remove(path)
}
diff --git a/vendor/github.com/sdboyer/gps/result_test.go b/vendor/github.com/sdboyer/gps/result_test.go
index ac98678..d0fd972 100644
--- a/vendor/github.com/sdboyer/gps/result_test.go
+++ b/vendor/github.com/sdboyer/gps/result_test.go
@@ -73,7 +73,7 @@
tmp := path.Join(os.TempDir(), "vsolvtest")
clean := true
- sm, err := NewSourceManager(naiveAnalyzer{}, path.Join(tmp, "cache"), true)
+ sm, err := NewSourceManager(naiveAnalyzer{}, path.Join(tmp, "cache"))
if err != nil {
b.Errorf("NewSourceManager errored unexpectedly: %q", err)
clean = false
@@ -81,7 +81,7 @@
// Prefetch the projects before timer starts
for _, lp := range r.p {
- _, _, err := sm.GetManifestAndLock(lp.Ident(), lp.Version())
+ err := sm.SyncSourceFor(lp.Ident())
if err != nil {
b.Errorf("failed getting project info during prefetch: %s", err)
clean = false
diff --git a/vendor/github.com/sdboyer/gps/solver.go b/vendor/github.com/sdboyer/gps/solver.go
index 8993b78..5556589 100644
--- a/vendor/github.com/sdboyer/gps/solver.go
+++ b/vendor/github.com/sdboyer/gps/solver.go
@@ -175,7 +175,7 @@
// this Solver's inputs.
//
// In such a case, it may not be necessary to run Solve() at all.
- HashInputs() ([]byte, error)
+ HashInputs() []byte
// Solve initiates a solving run. It will either complete successfully with
// a Solution, or fail with an informative error.
@@ -304,10 +304,7 @@
att: s.attempts,
}
- // An err here is impossible; it could only be caused by a parsing error
- // of the root tree, but that necessarily already succeeded back up in
- // selectRoot(), so we can ignore the err return here
- soln.hd, _ = s.HashInputs()
+ soln.hd = s.HashInputs()
// Convert ProjectAtoms into LockedProjects
soln.p = make([]LockedProject, len(all))
@@ -440,17 +437,16 @@
v: rootRev,
}
- ptree, err := s.b.ListPackages(pa.id, nil)
- if err != nil {
- return err
- }
-
- list := make([]string, len(ptree.Packages))
+ list := make([]string, len(s.rpt.Packages))
k := 0
- for path := range ptree.Packages {
- list[k] = path
- k++
+ for path, pkg := range s.rpt.Packages {
+ if pkg.Err != nil {
+ list[k] = path
+ k++
+ }
}
+ list = list[:k]
+ sort.Strings(list)
a := atomWithPackages{
a: pa,
@@ -489,7 +485,7 @@
heap.Push(s.unsel, bimodalIdentifier{id: dep.Ident, pl: dep.pl, fromRoot: true})
}
- s.traceSelectRoot(ptree, deps)
+ s.traceSelectRoot(s.rpt, deps)
return nil
}
@@ -515,8 +511,8 @@
allex := ptree.ExternalReach(false, false, s.ig)
// Use a map to dedupe the unique external packages
exmap := make(map[string]struct{})
- // Add the packages reached by the packages explicitly listed in the atom to
- // the list
+ // Add to the list those packages that are reached by the packages
+ // explicitly listed in the atom
for _, pkg := range a.pl {
expkgs, exists := allex[pkg]
if !exists {
@@ -543,6 +539,7 @@
reach[k] = pkg
k++
}
+ sort.Strings(reach)
deps := s.ovr.overrideAll(m.DependencyConstraints())
return s.intersectConstraintsWithImports(deps, reach)
diff --git a/vendor/github.com/sdboyer/gps/source_manager.go b/vendor/github.com/sdboyer/gps/source_manager.go
index 82064e4..f59ae62 100644
--- a/vendor/github.com/sdboyer/gps/source_manager.go
+++ b/vendor/github.com/sdboyer/gps/source_manager.go
@@ -83,6 +83,7 @@
// tools; control via dependency injection is intended to be sufficient.
type SourceMgr struct {
cachedir string
+ lf *os.File
srcs map[string]source
srcmut sync.RWMutex
an ProjectAnalyzer
@@ -94,22 +95,19 @@
// NewSourceManager produces an instance of gps's built-in SourceManager. It
// takes a cache directory (where local instances of upstream repositories are
-// stored), a vendor directory for the project currently being worked on, and a
-// force flag indicating whether to overwrite the global cache lock file (if
-// present).
+// stored), and a ProjectAnalyzer that is used to extract manifest and lock
+// information from source trees.
//
// The returned SourceManager aggressively caches information wherever possible.
-// It is recommended that, if tools need to do preliminary, work involving
-// upstream repository analysis prior to invoking a solve run, that they create
-// this SourceManager as early as possible and use it to their ends. That way,
-// the solver can benefit from any caches that may have already been warmed.
+// If tools need to do preliminary work involving upstream repository analysis
+// prior to invoking a solve run, it is recommended that they create this
+// SourceManager as early as possible and use it to their ends. That way, the
+// solver can benefit from any caches that may have already been warmed.
//
-// gps's SourceManager is intended to be threadsafe (if it's not, please
-// file a bug!). It should certainly be safe to reuse from one solving run to
-// the next; however, the fact that it takes a basedir as an argument makes it
-// much less useful for simultaneous use by separate solvers operating on
-// different root projects. This architecture may change in the future.
-func NewSourceManager(an ProjectAnalyzer, cachedir string, force bool) (*SourceMgr, error) {
+// gps's SourceManager is intended to be threadsafe (if it's not, please file a
+// bug!). It should be safe to reuse across concurrent solving runs, even on
+// unrelated projects.
+func NewSourceManager(an ProjectAnalyzer, cachedir string) (*SourceMgr, error) {
if an == nil {
return nil, fmt.Errorf("a ProjectAnalyzer must be provided to the SourceManager")
}
@@ -121,17 +119,24 @@
glpath := filepath.Join(cachedir, "sm.lock")
_, err = os.Stat(glpath)
- if err == nil && !force {
- return nil, fmt.Errorf("cache lock file %s exists - another process crashed or is still running?", glpath)
+ if err == nil {
+ return nil, CouldNotCreateLockError{
+ Path: glpath,
+ Err: fmt.Errorf("cache lock file %s exists - another process crashed or is still running?", glpath),
+ }
}
- _, err = os.OpenFile(glpath, os.O_CREATE|os.O_RDONLY, 0700) // is 0700 sane for this purpose?
+ fi, err := os.OpenFile(glpath, os.O_CREATE|os.O_EXCL, 0600) // is 0600 sane for this purpose?
if err != nil {
- return nil, fmt.Errorf("failed to create global cache lock file at %s with err %s", glpath, err)
+ return nil, CouldNotCreateLockError{
+ Path: glpath,
+ Err: fmt.Errorf("err on attempting to create global cache lock: %s", err),
+ }
}
return &SourceMgr{
cachedir: cachedir,
+ lf: fi,
srcs: make(map[string]source),
an: an,
dxt: pathDeducerTrie(),
@@ -139,8 +144,18 @@
}, nil
}
+type CouldNotCreateLockError struct {
+ Path string
+ Err error
+}
+
+func (e CouldNotCreateLockError) Error() string {
+ return e.Err.Error()
+}
+
// Release lets go of any locks held by the SourceManager.
func (sm *SourceMgr) Release() {
+ sm.lf.Close()
os.Remove(filepath.Join(sm.cachedir, "sm.lock"))
}
diff --git a/vendor/github.com/sdboyer/gps/source_test.go b/vendor/github.com/sdboyer/gps/source_test.go
index ffee963..787e573 100644
--- a/vendor/github.com/sdboyer/gps/source_test.go
+++ b/vendor/github.com/sdboyer/gps/source_test.go
@@ -86,7 +86,7 @@
SortForUpgrade(vlist)
evl := []Version{
NewVersion("1.0.0").Is(Revision("30605f6ac35fcb075ad0bfa9296f90a7d891523e")),
- NewBranch("master").Is(Revision("30605f6ac35fcb075ad0bfa9296f90a7d891523e")),
+ newDefaultBranch("master").Is(Revision("30605f6ac35fcb075ad0bfa9296f90a7d891523e")),
NewBranch("test").Is(Revision("30605f6ac35fcb075ad0bfa9296f90a7d891523e")),
}
if !reflect.DeepEqual(vlist, evl) {
@@ -147,6 +147,10 @@
if ident != un {
t.Errorf("Expected %s as source ident, got %s", un, ident)
}
+ evl := []Version{
+ NewVersion("1.0.0").Is(Revision("matt@mattfarina.com-20150731135137-pbphasfppmygpl68")),
+ newDefaultBranch("(default)").Is(Revision("matt@mattfarina.com-20150731135137-pbphasfppmygpl68")),
+ }
// check that an expected rev is present
is, err := src.revisionPresentIn(Revision("matt@mattfarina.com-20150731135137-pbphasfppmygpl68"))
@@ -168,12 +172,12 @@
t.Errorf("bzrSource.listVersions() should have set the upstream and cache existence bits for found")
}
- if len(vlist) != 1 {
- t.Errorf("bzr test repo should've produced one version, got %v", len(vlist))
+ if len(vlist) != 2 {
+ t.Errorf("bzr test repo should've produced two versions, got %v", len(vlist))
} else {
- v := NewVersion("1.0.0").Is(Revision("matt@mattfarina.com-20150731135137-pbphasfppmygpl68"))
- if vlist[0] != v {
- t.Errorf("bzr pair fetch reported incorrect first version, got %s", vlist[0])
+ SortForUpgrade(vlist)
+ if !reflect.DeepEqual(vlist, evl) {
+ t.Errorf("bzr version list was not what we expected:\n\t(GOT): %s\n\t(WNT): %s", vlist, evl)
}
}
@@ -190,12 +194,12 @@
t.Errorf("bzrSource.listVersions() should have set the upstream and cache existence bits for found")
}
- if len(vlist) != 1 {
- t.Errorf("bzr test repo should've produced one version, got %v", len(vlist))
+ if len(vlist) != 2 {
+ t.Errorf("bzr test repo should've produced two versions, got %v", len(vlist))
} else {
- v := NewVersion("1.0.0").Is(Revision("matt@mattfarina.com-20150731135137-pbphasfppmygpl68"))
- if vlist[0] != v {
- t.Errorf("bzr pair fetch reported incorrect first version, got %s", vlist[0])
+ SortForUpgrade(vlist)
+ if !reflect.DeepEqual(vlist, evl) {
+ t.Errorf("bzr version list was not what we expected:\n\t(GOT): %s\n\t(WNT): %s", vlist, evl)
}
}
@@ -225,94 +229,111 @@
}
}
- n := "bitbucket.org/mattfarina/testhgrepo"
- un := "https://" + n
- u, err := url.Parse(un)
- if err != nil {
- t.Errorf("URL was bad, lolwut? errtext: %s", err)
- rf()
- t.FailNow()
- }
- mb := maybeHgSource{
- url: u,
- }
+ tfunc := func(n string, evl []Version) {
+ un := "https://" + n
+ u, err := url.Parse(un)
+ if err != nil {
+ t.Errorf("URL was bad, lolwut? errtext: %s", err)
+ return
+ }
+ mb := maybeHgSource{
+ url: u,
+ }
- isrc, ident, err := mb.try(cpath, naiveAnalyzer{})
- if err != nil {
- t.Errorf("Unexpected error while setting up hgSource for test repo: %s", err)
- rf()
- t.FailNow()
- }
- src, ok := isrc.(*hgSource)
- if !ok {
- t.Errorf("Expected a hgSource, got a %T", isrc)
- rf()
- t.FailNow()
- }
- if ident != un {
- t.Errorf("Expected %s as source ident, got %s", un, ident)
- }
+ isrc, ident, err := mb.try(cpath, naiveAnalyzer{})
+ if err != nil {
+ t.Errorf("Unexpected error while setting up hgSource for test repo: %s", err)
+ return
+ }
+ src, ok := isrc.(*hgSource)
+ if !ok {
+ t.Errorf("Expected a hgSource, got a %T", isrc)
+ return
+ }
+ if ident != un {
+ t.Errorf("Expected %s as source ident, got %s", un, ident)
+ }
- // check that an expected rev is present
- is, err := src.revisionPresentIn(Revision("d680e82228d206935ab2eaa88612587abe68db07"))
- if err != nil {
- t.Errorf("Unexpected error while checking revision presence: %s", err)
- } else if !is {
- t.Errorf("Revision that should exist was not present")
- }
+ // check that an expected rev is present
+ is, err := src.revisionPresentIn(Revision("103d1bddef2199c80aad7c42041223083d613ef9"))
+ if err != nil {
+ t.Errorf("Unexpected error while checking revision presence: %s", err)
+ } else if !is {
+ t.Errorf("Revision that should exist was not present")
+ }
- vlist, err := src.listVersions()
- if err != nil {
- t.Errorf("Unexpected error getting version pairs from hg repo: %s", err)
- }
- evl := []Version{
- NewVersion("1.0.0").Is(Revision("d680e82228d206935ab2eaa88612587abe68db07")),
- NewBranch("test").Is(Revision("6c44ee3fe5d87763616c19bf7dbcadb24ff5a5ce")),
- }
+ vlist, err := src.listVersions()
+ if err != nil {
+ t.Errorf("Unexpected error getting version pairs from hg repo: %s", err)
+ }
- if src.ex.s&existsUpstream|existsInCache != existsUpstream|existsInCache {
- t.Errorf("hgSource.listVersions() should have set the upstream and cache existence bits for search")
- }
- if src.ex.f&existsUpstream|existsInCache != existsUpstream|existsInCache {
- t.Errorf("hgSource.listVersions() should have set the upstream and cache existence bits for found")
- }
+ if src.ex.s&existsUpstream|existsInCache != existsUpstream|existsInCache {
+ t.Errorf("hgSource.listVersions() should have set the upstream and cache existence bits for search")
+ }
+ if src.ex.f&existsUpstream|existsInCache != existsUpstream|existsInCache {
+ t.Errorf("hgSource.listVersions() should have set the upstream and cache existence bits for found")
+ }
- if len(vlist) != 2 {
- t.Errorf("hg test repo should've produced one version, got %v", len(vlist))
- } else {
- SortForUpgrade(vlist)
- if !reflect.DeepEqual(vlist, evl) {
- t.Errorf("Version list was not what we expected:\n\t(GOT): %s\n\t(WNT): %s", vlist, evl)
+ if len(vlist) != len(evl) {
+ t.Errorf("hg test repo should've produced %v versions, got %v", len(evl), len(vlist))
+ } else {
+ SortForUpgrade(vlist)
+ if !reflect.DeepEqual(vlist, evl) {
+ t.Errorf("Version list was not what we expected:\n\t(GOT): %s\n\t(WNT): %s", vlist, evl)
+ }
+ }
+
+ // Run again, this time to ensure cache outputs correctly
+ vlist, err = src.listVersions()
+ if err != nil {
+ t.Errorf("Unexpected error getting version pairs from hg repo: %s", err)
+ }
+
+ if src.ex.s&existsUpstream|existsInCache != existsUpstream|existsInCache {
+ t.Errorf("hgSource.listVersions() should have set the upstream and cache existence bits for search")
+ }
+ if src.ex.f&existsUpstream|existsInCache != existsUpstream|existsInCache {
+ t.Errorf("hgSource.listVersions() should have set the upstream and cache existence bits for found")
+ }
+
+ if len(vlist) != len(evl) {
+ t.Errorf("hg test repo should've produced %v versions, got %v", len(evl), len(vlist))
+ } else {
+ SortForUpgrade(vlist)
+ if !reflect.DeepEqual(vlist, evl) {
+ t.Errorf("Version list was not what we expected:\n\t(GOT): %s\n\t(WNT): %s", vlist, evl)
+ }
+ }
+
+ // recheck that rev is present, this time interacting with cache differently
+ is, err = src.revisionPresentIn(Revision("103d1bddef2199c80aad7c42041223083d613ef9"))
+ if err != nil {
+ t.Errorf("Unexpected error while re-checking revision presence: %s", err)
+ } else if !is {
+ t.Errorf("Revision that should exist was not present on re-check")
}
}
- // Run again, this time to ensure cache outputs correctly
- vlist, err = src.listVersions()
- if err != nil {
- t.Errorf("Unexpected error getting version pairs from hg repo: %s", err)
- }
+ // simultaneously run for both the repo with and without the magic bookmark
+ donech := make(chan struct{})
+ go func() {
+ tfunc("bitbucket.org/sdboyer/withbm", []Version{
+ NewVersion("v1.0.0").Is(Revision("aa110802a0c64195d0a6c375c9f66668827c90b4")),
+ newDefaultBranch("@").Is(Revision("b10d05d581e5401f383e48ccfeb84b48fde99d06")),
+ NewBranch("another").Is(Revision("b10d05d581e5401f383e48ccfeb84b48fde99d06")),
+ NewBranch("default").Is(Revision("3d466f437f6616da594bbab6446cc1cb4328d1bb")),
+ NewBranch("newbranch").Is(Revision("5e2a01be9aee942098e44590ae545c7143da9675")),
+ })
+ close(donech)
+ }()
- if src.ex.s&existsUpstream|existsInCache != existsUpstream|existsInCache {
- t.Errorf("hgSource.listVersions() should have set the upstream and cache existence bits for search")
- }
- if src.ex.f&existsUpstream|existsInCache != existsUpstream|existsInCache {
- t.Errorf("hgSource.listVersions() should have set the upstream and cache existence bits for found")
- }
+ tfunc("bitbucket.org/sdboyer/nobm", []Version{
+ NewVersion("v1.0.0").Is(Revision("aa110802a0c64195d0a6c375c9f66668827c90b4")),
+ newDefaultBranch("default").Is(Revision("3d466f437f6616da594bbab6446cc1cb4328d1bb")),
+ NewBranch("another").Is(Revision("b10d05d581e5401f383e48ccfeb84b48fde99d06")),
+ NewBranch("newbranch").Is(Revision("5e2a01be9aee942098e44590ae545c7143da9675")),
+ })
- if len(vlist) != 2 {
- t.Errorf("hg test repo should've produced one version, got %v", len(vlist))
- } else {
- SortForUpgrade(vlist)
- if !reflect.DeepEqual(vlist, evl) {
- t.Errorf("Version list was not what we expected:\n\t(GOT): %s\n\t(WNT): %s", vlist, evl)
- }
- }
-
- // recheck that rev is present, this time interacting with cache differently
- is, err = src.revisionPresentIn(Revision("d680e82228d206935ab2eaa88612587abe68db07"))
- if err != nil {
- t.Errorf("Unexpected error while re-checking revision presence: %s", err)
- } else if !is {
- t.Errorf("Revision that should exist was not present on re-check")
- }
+ <-donech
+ rf()
}
diff --git a/vendor/github.com/sdboyer/gps/types.go b/vendor/github.com/sdboyer/gps/types.go
index 11221e3..33b57f9 100644
--- a/vendor/github.com/sdboyer/gps/types.go
+++ b/vendor/github.com/sdboyer/gps/types.go
@@ -211,12 +211,6 @@
}
}
-//type byImportPath []Package
-
-//func (s byImportPath) Len() int { return len(s) }
-//func (s byImportPath) Less(i, j int) bool { return s[i].ImportPath < s[j].ImportPath }
-//func (s byImportPath) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
-
// completeDep (name hopefully to change) provides the whole picture of a
// dependency - the root (repo and project, since currently we assume the two
// are the same) name, a constraint, and the actual packages needed that are
diff --git a/vendor/github.com/sdboyer/gps/vcs_source.go b/vendor/github.com/sdboyer/gps/vcs_source.go
index ecded0c..338a2da 100644
--- a/vendor/github.com/sdboyer/gps/vcs_source.go
+++ b/vendor/github.com/sdboyer/gps/vcs_source.go
@@ -136,13 +136,59 @@
s.ex.s |= existsUpstream
s.ex.f |= existsUpstream
+ // pull out the HEAD rev (it's always first) so we know what branches to
+ // mark as default. This is, perhaps, not the best way to glean this, but it
+ // was good enough for git itself until 1.8.5. Also, the alternative is
+ // sniffing data out of the pack protocol, which is a separate request, and
+ // also waaaay more than we want to do right now.
+ //
+ // The cost is that we could potentially have multiple branches marked as
+ // the default. If that does occur, a later check (again, emulating git
+ // <1.8.5 behavior) further narrows the failure mode by choosing master as
+ // the sole default branch if a) master exists and b) master is one of the
+ // branches marked as a default.
+ //
+ // This all reduces the failure mode to a very narrow range of
+ // circumstances. Nevertheless, if we do end up emitting multiple
+ // default branches, it is possible that a user could end up following a
+ // non-default branch, IF:
+ //
+ // * Multiple branches match the HEAD rev
+ // * None of them are master
+ // * The solver makes it into the branch list in the version queue
+ // * The user has provided no constraint, or DefaultBranch
+ // * A branch that is not actually the default, but happens to share the
+ // rev, is lexicographically earlier than the true default branch
+ //
+ // Then the user could end up with an erroneous non-default branch in their
+ // lock file.
+ headrev := Revision(all[0][:40])
+ var onedef, multidef, defmaster bool
+
smap := make(map[string]bool)
uniq := 0
vlist = make([]Version, len(all)-1) // less 1, because always ignore HEAD
for _, pair := range all {
var v PairedVersion
if string(pair[46:51]) == "heads" {
- v = NewBranch(string(pair[52:])).Is(Revision(pair[:40])).(PairedVersion)
+ rev := Revision(pair[:40])
+
+ isdef := rev == headrev
+ n := string(pair[52:])
+ if isdef {
+ if onedef {
+ multidef = true
+ }
+ onedef = true
+ if n == "master" {
+ defmaster = true
+ }
+ }
+ v = branchVersion{
+ name: n,
+ isDefault: isdef,
+ }.Is(rev).(PairedVersion)
+
vlist[uniq] = v
uniq++
} else if string(pair[46:50]) == "tags" {
@@ -169,6 +215,20 @@
// Trim off excess from the slice
vlist = vlist[:uniq]
+ // There were multiple default branches, but one was master. So, go through
+ // and strip the default flag from all the non-master branches.
+ if multidef && defmaster {
+ for k, v := range vlist {
+ pv := v.(PairedVersion)
+ if bv, ok := pv.Unpair().(branchVersion); ok {
+ if bv.name != "master" && bv.isDefault == true {
+ bv.isDefault = false
+ vlist[k] = bv.Is(pv.Underlying())
+ }
+ }
+ }
+ }
+
// Process the version data into the cache
//
// reset the rmap and vmap, as they'll be fully repopulated by this
@@ -226,23 +286,30 @@
}
var out []byte
-
// Now, list all the tags
out, err = r.RunFromDir("bzr", "tags", "--show-ids", "-v")
if err != nil {
- return
+ return nil, fmt.Errorf("%s: %s", err, string(out))
}
all := bytes.Split(bytes.TrimSpace(out), []byte("\n"))
- // reset the rmap and vmap, as they'll be fully repopulated by this
- // TODO(sdboyer) detect out-of-sync pairings as we do this?
+ var branchrev []byte
+ branchrev, err = r.RunFromDir("bzr", "version-info", "--custom", "--template={revision_id}", "--revision=branch:.")
+ br := string(branchrev)
+ if err != nil {
+ return nil, fmt.Errorf("%s: %s", err, br)
+ }
+
+ // Both commands completed successfully, so there's no further possibility
+ // of errors. That means it's now safe to reset the rmap and vmap, as
+ // they're about to be fully repopulated.
s.dc.vMap = make(map[UnpairedVersion]Revision)
s.dc.rMap = make(map[Revision][]UnpairedVersion)
+ vlist = make([]Version, len(all)+1)
- vlist = make([]Version, len(all))
- k := 0
- for _, line := range all {
+ // Now, all the tags.
+ for k, line := range all {
idx := bytes.IndexByte(line, 32) // space
v := NewVersion(string(line[:idx]))
r := Revision(bytes.TrimSpace(line[idx:]))
@@ -250,9 +317,16 @@
s.dc.vMap[v] = r
s.dc.rMap[r] = append(s.dc.rMap[r], v)
vlist[k] = v.Is(r)
- k++
}
+ // Last, add the default branch, hardcoding the visual representation of it
+ // that bzr uses when operating in the workflow mode we're using.
+ v := newDefaultBranch("(default)")
+ rev := Revision(string(branchrev))
+ s.dc.vMap[v] = rev
+ s.dc.rMap[rev] = append(s.dc.rMap[rev], v)
+ vlist[len(vlist)-1] = v.Is(rev)
+
// Cache is now in sync with upstream's version list
s.cvsync = true
return
@@ -301,7 +375,7 @@
// Now, list all the tags
out, err = r.RunFromDir("hg", "tags", "--debug", "--verbose")
if err != nil {
- return
+ return nil, fmt.Errorf("%s: %s", err, string(out))
}
all := bytes.Split(bytes.TrimSpace(out), []byte("\n"))
@@ -330,30 +404,71 @@
vlist = append(vlist, v)
}
- out, err = r.RunFromDir("hg", "branches", "--debug", "--verbose")
+ // bookmarks next, because the presence of the magic @ bookmark has to
+ // determine how we handle the branches
+ var magicAt bool
+ out, err = r.RunFromDir("hg", "bookmarks", "--debug")
if err != nil {
// better nothing than partial and misleading
- vlist = nil
- return
+ return nil, fmt.Errorf("%s: %s", err, string(out))
+ }
+
+ out = bytes.TrimSpace(out)
+ if !bytes.Equal(out, []byte("no bookmarks set")) {
+ all = bytes.Split(out, []byte("\n"))
+ for _, line := range all {
+ // Trim leading spaces, and * marker if present
+ line = bytes.TrimLeft(line, " *")
+ pair := bytes.Split(line, []byte(":"))
+ // if this doesn't split exactly once, we have something weird
+ if len(pair) != 2 {
+ continue
+ }
+
+ // Split on colon; this gets us the rev and the branch plus local revno
+ idx := bytes.IndexByte(pair[0], 32) // space
+ // if it's the magic @ marker, make that the default branch
+ str := string(pair[0][:idx])
+ var v Version
+ if str == "@" {
+ magicAt = true
+ v = newDefaultBranch(str).Is(Revision(pair[1])).(PairedVersion)
+ } else {
+ v = NewBranch(str).Is(Revision(pair[1])).(PairedVersion)
+ }
+ vlist = append(vlist, v)
+ }
+ }
+
+ out, err = r.RunFromDir("hg", "branches", "-c", "--debug")
+ if err != nil {
+ // better nothing than partial and misleading
+ return nil, fmt.Errorf("%s: %s", err, string(out))
}
all = bytes.Split(bytes.TrimSpace(out), []byte("\n"))
- lbyt = []byte("(inactive)")
for _, line := range all {
- if bytes.Equal(lbyt, line[len(line)-len(lbyt):]) {
- // Skip inactive branches
- continue
- }
+ // Trim inactive and closed suffixes, if present; we represent these
+ // anyway
+ line = bytes.TrimSuffix(line, []byte(" (inactive)"))
+ line = bytes.TrimSuffix(line, []byte(" (closed)"))
// Split on colon; this gets us the rev and the branch plus local revno
pair := bytes.Split(line, []byte(":"))
idx := bytes.IndexByte(pair[0], 32) // space
- v := NewBranch(string(pair[0][:idx])).Is(Revision(pair[1])).(PairedVersion)
+ str := string(pair[0][:idx])
+ // if there was no magic @ bookmark, and this is mercurial's magic
+ // "default" branch, then mark it as default branch
+ var v Version
+ if !magicAt && str == "default" {
+ v = newDefaultBranch(str).Is(Revision(pair[1])).(PairedVersion)
+ } else {
+ v = NewBranch(str).Is(Revision(pair[1])).(PairedVersion)
+ }
vlist = append(vlist, v)
}
// reset the rmap and vmap, as they'll be fully repopulated by this
- // TODO(sdboyer) detect out-of-sync pairings as we do this?
s.dc.vMap = make(map[UnpairedVersion]Revision)
s.dc.rMap = make(map[Revision][]UnpairedVersion)
diff --git a/vendor/github.com/sdboyer/gps/version.go b/vendor/github.com/sdboyer/gps/version.go
index f288b2a..7912d1e 100644
--- a/vendor/github.com/sdboyer/gps/version.go
+++ b/vendor/github.com/sdboyer/gps/version.go
@@ -69,7 +69,23 @@
// NewBranch creates a new Version to represent a floating version (in
// general, a branch).
func NewBranch(body string) UnpairedVersion {
- return branchVersion(body)
+ return branchVersion{
+ name: body,
+ // We always set isDefault to false here, because the property is
+ // specifically designed to be internal-only: only the SourceManager
+ // gets to mark it. This is OK because nothing that client code is
+ // responsible for needs to touch it.
+ //
+ // TODO(sdboyer) ...maybe. this is just ugly.
+ isDefault: false,
+ }
+}
+
+func newDefaultBranch(body string) UnpairedVersion {
+ return branchVersion{
+ name: body,
+ isDefault: true,
+ }
}
// NewVersion creates a Semver-typed Version if the provided version string is
@@ -150,13 +166,16 @@
return none
}
-type branchVersion string
-
-func (v branchVersion) String() string {
- return string(v)
+type branchVersion struct {
+ name string
+ isDefault bool
}
-func (r branchVersion) Type() string {
+func (v branchVersion) String() string {
+ return string(v.name)
+}
+
+func (v branchVersion) Type() string {
return "branch"
}
@@ -165,10 +184,10 @@
case versionTypeUnion:
return tv.Matches(v)
case branchVersion:
- return v == tv
+ return v.name == tv.name
case versionPair:
if tv2, ok := tv.v.(branchVersion); ok {
- return tv2 == v
+ return tv2.name == v.name
}
}
return false
@@ -183,10 +202,10 @@
case versionTypeUnion:
return tc.MatchesAny(v)
case branchVersion:
- return v == tc
+ return v.name == tc.name
case versionPair:
if tc2, ok := tc.v.(branchVersion); ok {
- return tc2 == v
+ return tc2.name == v.name
}
}
@@ -202,12 +221,12 @@
case versionTypeUnion:
return tc.Intersect(v)
case branchVersion:
- if v == tc {
+ if v.name == tc.name {
return v
}
case versionPair:
if tc2, ok := tc.v.(branchVersion); ok {
- if v == tc2 {
+ if v.name == tc2.name {
return v
}
}
@@ -615,9 +634,19 @@
panic("unreachable")
}
- switch l.(type) {
- // For these, now nothing to do but alpha sort
- case Revision, branchVersion, plainVersion:
+ switch tl := l.(type) {
+ case branchVersion:
+ tr := r.(branchVersion)
+ if tl.isDefault != tr.isDefault {
+ // If they're not both defaults, then return the left val: if left
+ // is the default, then it is "less" (true) b/c we want it earlier.
+ // Else the right is the default, and so the left should be later
+ // (false).
+ return tl.isDefault
+ }
+ return l.String() < r.String()
+ case Revision, plainVersion:
+ // All that we can do now is alpha sort
return l.String() < r.String()
}
@@ -652,9 +681,19 @@
panic("unreachable")
}
- switch l.(type) {
- // For these, now nothing to do but alpha
- case Revision, branchVersion, plainVersion:
+ switch tl := l.(type) {
+ case branchVersion:
+ tr := r.(branchVersion)
+ if tl.isDefault != tr.isDefault {
+ // If they're not both defaults, then return the left val: if left
+ // is the default, then it is "less" (true) b/c we want it earlier.
+ // Else the right is the default, and so the left should be later
+ // (false).
+ return tl.isDefault
+ }
+ return l.String() < r.String()
+ case Revision, plainVersion:
+ // All that we can do now is alpha sort
return l.String() < r.String()
}
diff --git a/vendor/github.com/sdboyer/gps/version_test.go b/vendor/github.com/sdboyer/gps/version_test.go
index 394bb27..d375e77 100644
--- a/vendor/github.com/sdboyer/gps/version_test.go
+++ b/vendor/github.com/sdboyer/gps/version_test.go
@@ -10,7 +10,7 @@
v4 := NewVersion("1.0.1")
v5 := NewVersion("v2.0.5")
v6 := NewVersion("2.0.5.2")
- v7 := NewBranch("unwrapped")
+ v7 := newDefaultBranch("unwrapped")
v8 := NewVersion("20.0.5.2")
start := []Version{
@@ -32,14 +32,14 @@
edown := []Version{
v3, v4, v5, // semvers
- v1, v2, v7, // floating/branches
+ v7, v1, v2, // floating/branches
v6, v8, // plain versions
rev, // revs
}
eup := []Version{
v5, v4, v3, // semvers
- v1, v2, v7, // floating/branches
+ v7, v1, v2, // floating/branches
v6, v8, // plain versions
rev, // revs
}
diff --git a/vendor/gopkg.in/yaml.v2/README.md b/vendor/gopkg.in/yaml.v2/README.md
index 1884de6..7b8bd86 100644
--- a/vendor/gopkg.in/yaml.v2/README.md
+++ b/vendor/gopkg.in/yaml.v2/README.md
@@ -42,7 +42,7 @@
License
-------
-The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.
+The yaml package is licensed under the LGPL with an exception that allows it to be linked statically. Please see the LICENSE file for details.
Example