Merging master onto the feat/more-cfg branch
diff --git a/.travis.yml b/.travis.yml
index 3d5193d..0a42c0e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -17,7 +17,7 @@
 # in the vendor directory. We don't need to test all dependent packages.
 # Only testing this project.
 script:
-  - GO15VENDOREXPERIMENT=1 go test -v . ./cmd ./gb ./util ./cfg
+  - GO15VENDOREXPERIMENT=1 make test
 
 notifications:
   irc: "irc.freenode.net#masterminds"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index dd05f1b..353ad8b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,12 @@
+# Release 0.9.0 (xxxx-xx-xx)
+
+- Refactored Glide
+  - Many features broken out into packages. All but `action/` can be
+    used as libraries.
+  - Support for GPM Godeps-Git has been removed
+  - Cookoo is not used anymore
+  - The `action/` package replaces `cmd/`
+
 # Release 0.8.3 (2015-12-30)
 
 - Issue #198: Instead of stopping `glide install` for a hash failures providing
@@ -19,10 +28,10 @@
 - Fixed #169: cookoo git url has auth info. Makes glide unbuildable for
   environments not setup for GitHub.
 - Fixed #180: the hash in the glide.lock file was not being properly calculated.
-- Fixed #174: glide get was causing an error when the flad --updated-vendored
+- Fixed #174: glide get was causing an error when the flag --updated-vendored
   was being used.
 - Fixed #175: glide get when the GOPATH isn't setup properly could end up in
-  an infinate loop.
+  an infinite loop.
 
 # Release 0.8.1 (2015-12-15)
 
diff --git a/Makefile b/Makefile
index 5d31ee0..7adc280 100644
--- a/Makefile
+++ b/Makefile
@@ -9,7 +9,7 @@
 	install -m 755 ./glide ${DESTDIR}/usr/local/bin/glide
 
 test:
-	go test . ./cmd ./gb
+	go test . ./gb ./path ./action ./tree ./util ./godep ./gpm ./cfg ./dependency ./importer ./msg ./repo
 
 clean:
 	rm -f ./glide.test
@@ -18,7 +18,6 @@
 
 bootstrap:
 	mkdir ./vendor
-	git clone https://github.com/Masterminds/cookoo vendor/github.com/Masterminds/cookoo
 	git clone https://github.com/Masterminds/vcs vendor/github.com/Masterminds/vcs
 	git clone https://gopkg.in/yaml.v2 vendor/gopkg.in/yaml.v2
 	git clone https://github.com/codegangsta/cli vendor/github.com/codegangsta/cli
diff --git a/cmd/about.go b/action/about.go
similarity index 80%
rename from cmd/about.go
rename to action/about.go
index a736f89..e546b6a 100644
--- a/cmd/about.go
+++ b/action/about.go
@@ -1,17 +1,9 @@
-// Package cmd provides re-usable commands for Glide.
-package cmd
+package action
 
-import (
-	"fmt"
+import "github.com/Masterminds/glide/msg"
 
-	"github.com/Masterminds/cookoo"
-)
-
-// About information for the CLI
-func About(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-
-	about := `
-Glide: The Lightweight Vendor Package Manager. Manage your vendor and vendored
+const aboutMessage = `
+Glide: Vendor Package Management for Go. Manage your vendor and vendored
 packages with ease.
 
 Name:
@@ -45,6 +37,7 @@
     OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
     THE SOFTWARE.`
 
-	fmt.Println(about)
-	return true, nil
+// About prints information about Glide.
+func About() {
+	msg.Puts(aboutMessage)
 }
diff --git a/action/about_test.go b/action/about_test.go
new file mode 100644
index 0000000..40d6fc8
--- /dev/null
+++ b/action/about_test.go
@@ -0,0 +1,21 @@
+package action
+
+import (
+	"bytes"
+	"testing"
+
+	"github.com/Masterminds/glide/msg"
+)
+
+func TestAbout(t *testing.T) {
+	var buf bytes.Buffer
+	old := msg.Default.Stdout
+	msg.Default.Stdout = &buf
+	About()
+
+	if buf.Len() < len(aboutMessage) {
+		t.Errorf("expected this to match aboutMessage: %q", buf.String())
+	}
+
+	msg.Default.Stdout = old
+}
diff --git a/action/create.go b/action/create.go
new file mode 100644
index 0000000..d08857d
--- /dev/null
+++ b/action/create.go
@@ -0,0 +1,183 @@
+package action
+
+import (
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/dependency"
+	"github.com/Masterminds/glide/gb"
+	"github.com/Masterminds/glide/godep"
+	"github.com/Masterminds/glide/gpm"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+	"github.com/Masterminds/glide/util"
+)
+
+// Create creates/initializes a new Glide repository.
+//
+// This will fail if a glide.yaml already exists.
+//
+// By default, this will scan the current source code directory for dependencies.
+//
+// If skipImport is set to true, this will not attempt to import from an existing
+// GPM, Godep, or GB project, even if one exists. However, it will still attempt
+// to read the local source to determine required packages.
+func Create(base string, skipImport bool) {
+	glidefile := gpath.GlideFile
+	// Guard against overwrites.
+	guardYAML(glidefile)
+
+	// Guess deps
+	conf := guessDeps(base, skipImport)
+	// Write YAML
+	if err := conf.WriteFile(glidefile); err != nil {
+		msg.Die("Could not save %s: %s", glidefile, err)
+	}
+}
+
+// guardYAML fails if the given file already exists.
+//
+// This prevents an important file from being overwritten.
+func guardYAML(filename string) {
+	if _, err := os.Stat(filename); err == nil {
+		msg.Die("Cowardly refusing to overwrite existing YAML.")
+	}
+}
+
+// guessDeps attempts to resolve all of the dependencies for a given project.
+//
+// base is the directory to start with.
+// skipImport will skip running the automatic imports.
+//
+// FIXME: This function is likely a one-off that has a more standard alternative.
+// It's also long and could use a refactor.
+func guessDeps(base string, skipImport bool) *cfg.Config {
+	buildContext, err := util.GetBuildContext()
+	if err != nil {
+		msg.Die("Failed to build an import context: %s", err)
+	}
+	name := buildContext.PackageName(base)
+
+	msg.Info("Generating a YAML configuration file and guessing the dependencies")
+
+	config := new(cfg.Config)
+
+	// Get the name of the top level package
+	config.Name = name
+
+	// Import by looking at other package managers and looking over the
+	// entire directory structure.
+
+	// Attempt to import from other package managers.
+	if !skipImport {
+		msg.Info("Attempting to import from other package managers (use --skip-import to skip)")
+		deps := []*cfg.Dependency{}
+		absBase, err := filepath.Abs(base)
+		if err != nil {
+			msg.Die("Failed to resolve location of %s: %s", base, err)
+		}
+
+		if d, ok := guessImportGodep(absBase); ok {
+			msg.Info("Importing Godep configuration")
+			msg.Warn("Godep uses commit id versions. Consider using Semantic Versions with Glide")
+			deps = d
+		} else if d, ok := guessImportGPM(absBase); ok {
+			msg.Info("Importing GPM configuration")
+			deps = d
+		} else if d, ok := guessImportGB(absBase); ok {
+			msg.Info("Importing GB configuration")
+			deps = d
+		}
+
+		for _, i := range deps {
+			msg.Info("Found imported reference to %s\n", i.Name)
+			config.Imports = append(config.Imports, i)
+		}
+	}
+
+	// Resolve dependencies by looking at the tree.
+	r, err := dependency.NewResolver(base)
+	if err != nil {
+		msg.Die("Error creating a dependency resolver: %s", err)
+	}
+
+	h := &dependency.DefaultMissingPackageHandler{Missing: []string{}, Gopath: []string{}}
+	r.Handler = h
+
+	sortable, err := r.ResolveLocal(false)
+	if err != nil {
+		msg.Die("Error resolving local dependencies: %s", err)
+	}
+
+	sort.Strings(sortable)
+
+	vpath := r.VendorDir
+	if !strings.HasSuffix(vpath, "/") {
+		vpath = vpath + string(os.PathSeparator)
+	}
+
+	for _, pa := range sortable {
+		n := strings.TrimPrefix(pa, vpath)
+		root := util.GetRootFromPackage(n)
+
+		if !config.HasDependency(root) {
+			msg.Info("Found reference to %s\n", n)
+			d := &cfg.Dependency{
+				Name: root,
+			}
+			subpkg := strings.TrimPrefix(n, root)
+			if len(subpkg) > 0 && subpkg != "/" {
+				d.Subpackages = []string{subpkg}
+			}
+			config.Imports = append(config.Imports, d)
+		} else {
+			subpkg := strings.TrimPrefix(n, root)
+			if len(subpkg) > 0 && subpkg != "/" {
+				subpkg = strings.TrimPrefix(subpkg, "/")
+				d := config.Imports.Get(root)
+				f := false
+				for _, v := range d.Subpackages {
+					if v == subpkg {
+						f = true
+					}
+				}
+				if !f {
+					msg.Info("Adding sub-package %s to %s\n", subpkg, root)
+					d.Subpackages = append(d.Subpackages, subpkg)
+				}
+			}
+		}
+	}
+
+	return config
+}
+
+func guessImportGodep(dir string) ([]*cfg.Dependency, bool) {
+	d, err := godep.Parse(dir)
+	if err != nil || len(d) == 0 {
+		return []*cfg.Dependency{}, false
+	}
+
+	return d, true
+}
+
+func guessImportGPM(dir string) ([]*cfg.Dependency, bool) {
+	d, err := gpm.Parse(dir)
+	if err != nil || len(d) == 0 {
+		return []*cfg.Dependency{}, false
+	}
+
+	return d, true
+}
+
+func guessImportGB(dir string) ([]*cfg.Dependency, bool) {
+	d, err := gb.Parse(dir)
+	if err != nil || len(d) == 0 {
+		return []*cfg.Dependency{}, false
+	}
+
+	return d, true
+}
diff --git a/action/debug.go b/action/debug.go
new file mode 100644
index 0000000..7b8eea6
--- /dev/null
+++ b/action/debug.go
@@ -0,0 +1,20 @@
+package action
+
+import (
+	"github.com/Masterminds/glide/msg"
+)
+
+// Debug sets the debugging flags across components.
+func Debug(on bool) {
+	msg.Default.IsDebugging = on
+}
+
+// Quiet sets the quiet flags across components.
+func Quiet(on bool) {
+	msg.Default.Quiet = on
+}
+
+// NoColor sets the color flags.
+func NoColor(on bool) {
+	msg.Default.NoColor = on
+}
diff --git a/action/doc.go b/action/doc.go
new file mode 100644
index 0000000..992faf6
--- /dev/null
+++ b/action/doc.go
@@ -0,0 +1,14 @@
+/* Package action provides implementations for every Glide command.
+
+This is not a general-purpose library. It is the main flow controller for Glide.
+
+The main glide package acts as a Facade, with this package providing the
+implementation. This package should know nothing of the command line flags or
+runtime characteristics. However, this package is allowed to control the flow
+of the application, including termination. So actions may call `msg.Die()` to
+immediately stop execution of the program.
+
+In general, actions are not required to function as library functions, nor as
+concurrency-safe functions.
+*/
+package action
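
As a rough illustration of the Facade relationship described in the package comment above (a sketch for orientation, not code from this change): the command-line front end parses its own flags and then delegates to this package, which owns program flow and may terminate the process via msg.Die(). The flag names below are assumptions made for the example, not Glide's actual CLI.

package main

import (
	"flag"

	"github.com/Masterminds/glide/action"
)

func main() {
	// Hypothetical flags; the real CLI wiring lives in the main glide package.
	yaml := flag.String("yaml", "glide.yaml", "path to the glide YAML file")
	skipImport := flag.Bool("skip-import", false, "skip importing from GPM/Godep/GB")
	flag.Parse()

	// Initialize shared path settings, then hand control to an action. The
	// empty home directory is a simplification for this sketch.
	action.Init(*yaml, "")
	action.Create(".", *skipImport)
}
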
diff --git a/action/ensure.go b/action/ensure.go
new file mode 100644
index 0000000..c57bbac
--- /dev/null
+++ b/action/ensure.go
@@ -0,0 +1,105 @@
+package action
+
+import (
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path"
+	"strings"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+)
+
+// EnsureConfig loads and returns a config file.
+//
+// Any error will cause an immediate exit, with an error printed to Stderr.
+func EnsureConfig() *cfg.Config {
+	yamlpath, err := gpath.Glide()
+	if err != nil {
+		msg.ExitCode(2)
+		msg.Die("Failed to find %s file in directory tree: %s", gpath.GlideFile, err)
+	}
+
+	yml, err := ioutil.ReadFile(yamlpath)
+	if err != nil {
+		msg.ExitCode(2)
+		msg.Die("Failed to load %s: %s", yamlpath, err)
+	}
+	conf, err := cfg.ConfigFromYaml(yml)
+	if err != nil {
+		msg.ExitCode(3)
+		msg.Die("Failed to parse %s: %s", yamlpath, err)
+	}
+
+	return conf
+}
+
+func EnsureCacheDir() {
+	msg.Warn("ensure.go: EnsureCacheDir is not implemented.")
+}
+
+// EnsureGoVendor ensures that the Go version is correct.
+func EnsureGoVendor() {
+	// 6l was removed in 1.5, when vendoring was introduced.
+	cmd := exec.Command("go", "tool", "6l")
+	if _, err := cmd.CombinedOutput(); err == nil {
+		msg.Warn("You must install the Go 1.5 or greater toolchain to work with Glide.\n")
+		os.Exit(1)
+	}
+	if os.Getenv("GO15VENDOREXPERIMENT") != "1" {
+		msg.Warn("To use Glide, you must set GO15VENDOREXPERIMENT=1\n")
+		os.Exit(1)
+	}
+
+	// Verify the setup isn't for the old version of glide. That is, this is
+	// no longer assuming the _vendor directory as the GOPATH. Inform of
+	// the change.
+	if _, err := os.Stat("_vendor/"); err == nil {
+		msg.Warn(`Your setup appears to be for the previous version of Glide.
+Previously, vendor packages were stored in _vendor/src/ and
+_vendor was set as your GOPATH. As of Go 1.5 the go tools
+recognize the vendor directory as a location for these
+files. Glide has embraced this. Please remove the _vendor
+directory or move the _vendor/src/ directory to vendor/.` + "\n")
+		os.Exit(1)
+	}
+}
+
+// EnsureVendorDir ensures that a vendor/ directory is present in the cwd.
+func EnsureVendorDir() {
+	fi, err := os.Stat(gpath.VendorDir)
+	if err != nil {
+		msg.Debug("Creating %s", gpath.VendorDir)
+		if err := os.MkdirAll(gpath.VendorDir, os.ModeDir|0755); err != nil {
+			msg.Die("Could not create %s: %s", gpath.VendorDir, err)
+		}
+	} else if !fi.IsDir() {
+		msg.Die("Vendor is not a directory")
+	}
+}
+
+// EnsureGopath fails if GOPATH is not set, or if $GOPATH/src is missing.
+//
+// Otherwise it returns the value of GOPATH.
+func EnsureGopath() string {
+	gps := gpath.Gopaths()
+	if len(gps) == 0 {
+		msg.Die("$GOPATH is not set.")
+	}
+
+	for _, gp := range gps {
+		_, err := os.Stat(path.Join(gp, "src"))
+		if err != nil {
+			msg.Warn("%s", err)
+			continue
+		}
+		return gp
+	}
+
+	msg.Error("Could not find any of %s/src.\n", strings.Join(gps, "/src, "))
+	msg.Info("As of Glide 0.5/Go 1.5, this is required.\n")
+	msg.Die("Without src, cannot continue.")
+	return ""
+}
diff --git a/action/get.go b/action/get.go
new file mode 100644
index 0000000..ce20e84
--- /dev/null
+++ b/action/get.go
@@ -0,0 +1,175 @@
+package action
+
+import (
+	"fmt"
+	"path/filepath"
+	"strings"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+	"github.com/Masterminds/glide/repo"
+	"github.com/Masterminds/glide/util"
+)
+
+// Get fetches one or more dependencies and installs.
+//
+// This includes performing dependency resolution and re-generating the lock file.
+func Get(names []string, installer *repo.Installer, insecure, skipRecursive bool) {
+	base := gpath.Basepath()
+	EnsureGopath()
+	EnsureVendorDir()
+	conf := EnsureConfig()
+	glidefile, err := gpath.Glide()
+	if err != nil {
+		msg.Die("Could not find Glide file: %s", err)
+	}
+
+	// Add the packages to the config.
+	if err := addPkgsToConfig(conf, names, insecure); err != nil {
+		msg.Die("Failed to get new packages: %s", err)
+	}
+
+	// Fetch the new packages. Can't resolve versions via installer.Update if
+	// get is called while the vendor/ directory is empty, so we check out
+	// everything.
+	installer.Checkout(conf, false)
+
+	// Prior to resolving dependencies we need to start working with a clone
+	// of the conf because we'll be making real changes to it.
+	confcopy := conf.Clone()
+
+	if !skipRecursive {
+		// Get all repos and update them.
+		// TODO: Can we streamline this in any way? The reason that we update all
+		// of the dependencies is that we need to re-negotiate versions. For example,
+		// if an existing dependency has the constraint >1.0 and this new package
+		// adds the constraint <2.0, then this may re-resolve the existing dependency
+		// to be between 1.0 and 2.0. But changing that dependency may then result
+		// in that dependency's dependencies changing... so we sorta do the whole
+		// thing to be safe.
+		err = installer.Update(confcopy)
+		if err != nil {
+			msg.Die("Could not update packages: %s", err)
+		}
+	}
+
+	// Set Reference
+	if err := repo.SetReference(confcopy); err != nil {
+		msg.Error("Failed to set references: %s", err)
+	}
+
+	// VendoredCleanup
+	if installer.UpdateVendored {
+		repo.VendoredCleanup(confcopy)
+	}
+
+	// Write YAML
+	if err := conf.WriteFile(glidefile); err != nil {
+		msg.Die("Failed to write glide YAML file: %s", err)
+	}
+	if !skipRecursive {
+		// Write lock
+		writeLock(conf, confcopy, base)
+	} else {
+		msg.Warn("Skipping lockfile generation because full dependency tree is not being calculated")
+	}
+}
+
+func writeLock(conf, confcopy *cfg.Config, base string) {
+	hash, err := conf.Hash()
+	if err != nil {
+		msg.Die("Failed to generate config hash. Unable to generate lock file.")
+	}
+	lock := cfg.NewLockfile(confcopy.Imports, hash)
+	if err := lock.WriteFile(filepath.Join(base, gpath.LockFile)); err != nil {
+		msg.Die("Failed to write glide lock file: %s", err)
+	}
+}
+
+// addPkgsToConfig adds the given packages to the config file.
+//
+// Along the way it:
+// - ensures that this package is not in the ignore list
+// - checks to see if this is already in the dependency list.
+// - splits the version off of the package name and adds the version attribute
+// - separates the repo root from its packages
+// - sets up insecure repo URLs where necessary
+// - generates a list of subpackages
+func addPkgsToConfig(conf *cfg.Config, names []string, insecure bool) error {
+
+	msg.Info("Preparing to install %d package(s).", len(names))
+
+	for _, name := range names {
+		var version string
+		parts := strings.Split(name, "#")
+		if len(parts) > 1 {
+			name = parts[0]
+			version = parts[1]
+		}
+
+		root := util.GetRootFromPackage(name)
+		if len(root) == 0 {
+			return fmt.Errorf("Package name is required for %q.", name)
+		}
+
+		if conf.HasDependency(root) {
+
+			// Check if the subpackage is present.
+			subpkg := strings.TrimPrefix(name, root)
+			subpkg = strings.TrimPrefix(subpkg, "/")
+			if subpkg != "" {
+				found := false
+				dep := conf.Imports.Get(root)
+				for _, s := range dep.Subpackages {
+					if s == subpkg {
+						found = true
+						break
+					}
+				}
+				if found {
+					msg.Warn("Package %q is already in glide.yaml. Skipping", name)
+				} else {
+					dep.Subpackages = append(dep.Subpackages, subpkg)
+					msg.Info("Adding sub-package %s to existing import %s", subpkg, root)
+				}
+			} else {
+				msg.Warn("Package %q is already in glide.yaml. Skipping", root)
+			}
+			continue
+		}
+
+		if conf.HasIgnore(root) {
+			msg.Warn("Package %q is set to be ignored in glide.yaml. Skipping", root)
+			continue
+		}
+
+		dep := &cfg.Dependency{
+			Name: root,
+		}
+
+		if version != "" {
+			dep.Reference = version
+		}
+
+		// When retrieving from an insecure location, set the repo to the
+		// insecure location.
+		if insecure {
+			dep.Repository = "http://" + root
+		}
+
+		subpkg := strings.TrimPrefix(name, root)
+		if len(subpkg) > 0 && subpkg != "/" {
+			dep.Subpackages = []string{strings.TrimPrefix(subpkg, "/")}
+		}
+
+		if dep.Reference != "" {
+			msg.Info("Importing %s with the version %s", dep.Name, dep.Reference)
+		} else {
+			msg.Info("Importing %s", dep.Name)
+		}
+
+		conf.Imports = append(conf.Imports, dep)
+	}
+	return nil
+}
diff --git a/action/get_test.go b/action/get_test.go
new file mode 100644
index 0000000..94d18c7
--- /dev/null
+++ b/action/get_test.go
@@ -0,0 +1,46 @@
+package action
+
+import (
+	"io/ioutil"
+	"testing"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+)
+
+func TestAddPkgsToConfig(t *testing.T) {
+	// Route output to discard so it's not displayed with the test output.
+	o := msg.Default.Stderr
+	msg.Default.Stderr = ioutil.Discard
+
+	conf := new(cfg.Config)
+	dep := new(cfg.Dependency)
+	dep.Name = "github.com/Masterminds/cookoo"
+	dep.Subpackages = append(dep.Subpackages, "convert")
+	conf.Imports = append(conf.Imports, dep)
+
+	names := []string{
+		"github.com/Masterminds/cookoo/fmt",
+		"github.com/Masterminds/semver",
+	}
+
+	addPkgsToConfig(conf, names, false)
+
+	if !conf.HasDependency("github.com/Masterminds/semver") {
+		t.Error("addPkgsToConfig failed to add github.com/Masterminds/semver")
+	}
+
+	d := conf.Imports.Get("github.com/Masterminds/cookoo")
+	found := false
+	for _, s := range d.Subpackages {
+		if s == "fmt" {
+			found = true
+		}
+	}
+	if !found {
+		t.Error("addPkgsToConfig failed to add subpackage to existing import")
+	}
+
+	// Restore messaging to original location
+	msg.Default.Stderr = o
+}
diff --git a/action/import_gb.go b/action/import_gb.go
new file mode 100644
index 0000000..6f836ff
--- /dev/null
+++ b/action/import_gb.go
@@ -0,0 +1,50 @@
+package action
+
+import (
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/gb"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+)
+
+// ImportGB imports GB dependencies into the present glide config.
+func ImportGB(dest string) {
+	base := "."
+	config := EnsureConfig()
+	if !gb.Has(base) {
+		msg.Die("There is no GB manifest to import.")
+	}
+	deps, err := gb.Parse(base)
+	if err != nil {
+		msg.Die("Failed to extract GB manifest: %s", err)
+	}
+	appendImports(deps, config)
+	writeConfigToFileOrStdout(config, dest)
+}
+
+func appendImports(deps []*cfg.Dependency, config *cfg.Config) {
+	if len(deps) == 0 {
+		msg.Info("No dependencies added.")
+		return
+	}
+
+	//Append deps to existing dependencies.
+	if err := config.AddImport(deps...); err != nil {
+		msg.Die("Failed to add imports: %s", err)
+	}
+}
+
+// writeConfigToFileOrStdout is a convenience function for import utils.
+func writeConfigToFileOrStdout(config *cfg.Config, dest string) {
+	if dest != "" {
+		if err := config.WriteFile(dest); err != nil {
+			msg.Die("Failed to write %s: %s", gpath.GlideFile, err)
+		}
+	} else {
+		o, err := config.Marshal()
+		if err != nil {
+			msg.Die("Error encoding config: %s", err)
+		}
+		msg.Default.Stdout.Write(o)
+	}
+}
diff --git a/action/import_godep.go b/action/import_godep.go
new file mode 100644
index 0000000..aafacbb
--- /dev/null
+++ b/action/import_godep.go
@@ -0,0 +1,21 @@
+package action
+
+import (
+	"github.com/Masterminds/glide/godep"
+	"github.com/Masterminds/glide/msg"
+)
+
+// ImportGodep imports a Godep file.
+func ImportGodep(dest string) {
+	base := "."
+	config := EnsureConfig()
+	if !godep.Has(base) {
+		msg.Die("No Godep data found.")
+	}
+	deps, err := godep.Parse(base)
+	if err != nil {
+		msg.Die("Failed to extract Godeps file: %s", err)
+	}
+	appendImports(deps, config)
+	writeConfigToFileOrStdout(config, dest)
+}
diff --git a/action/import_gpm.go b/action/import_gpm.go
new file mode 100644
index 0000000..b91bba5
--- /dev/null
+++ b/action/import_gpm.go
@@ -0,0 +1,21 @@
+package action
+
+import (
+	"github.com/Masterminds/glide/gpm"
+	"github.com/Masterminds/glide/msg"
+)
+
+// ImportGPM imports a GPM file.
+func ImportGPM(dest string) {
+	base := "."
+	config := EnsureConfig()
+	if !gpm.Has(base) {
+		msg.Die("No GPM Godeps file found.")
+	}
+	deps, err := gpm.Parse(base)
+	if err != nil {
+		msg.Die("Failed to extract GPM Godeps file: %s", err)
+	}
+	appendImports(deps, config)
+	writeConfigToFileOrStdout(config, dest)
+}
diff --git a/action/init.go b/action/init.go
new file mode 100644
index 0000000..b0c29da
--- /dev/null
+++ b/action/init.go
@@ -0,0 +1,11 @@
+package action
+
+import (
+	gpath "github.com/Masterminds/glide/path"
+)
+
+// Init initializes the action subsystem for handling one or more subsequent actions.
+func Init(yaml, home string) {
+	gpath.GlideFile = yaml
+	gpath.HomeDir = home
+}
diff --git a/action/install.go b/action/install.go
new file mode 100644
index 0000000..6ba06e5
--- /dev/null
+++ b/action/install.go
@@ -0,0 +1,84 @@
+package action
+
+import (
+	"io/ioutil"
+	"path/filepath"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/dependency"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+	"github.com/Masterminds/glide/repo"
+)
+
+// Install installs a vendor directory based on an existing Glide configuration.
+func Install(installer *repo.Installer) {
+	base := "."
+	// Ensure GOPATH
+	EnsureGopath()
+	EnsureVendorDir()
+	conf := EnsureConfig()
+
+	// Lockfile exists
+	if !gpath.HasLock(base) {
+		msg.Info("Lock file (glide.lock) does not exist. Performing update.")
+		Update(installer, false)
+		return
+	}
+	// Load lockfile
+	lock, err := LoadLockfile(base, conf)
+	if err != nil {
+		msg.Die("Could not load lockfile.")
+	}
+
+	// Delete unused packages
+	if installer.DeleteUnused {
+		// It's unclear whether this should operate off of the lock, or off
+		// of the glide.yaml file. I'd think that doing this based on the
+		// lock would be much more reliable.
+		dependency.DeleteUnused(conf)
+	}
+
+	// Install
+	newConf, err := installer.Install(lock, conf)
+	if err != nil {
+		msg.Die("Failed to install: %s", err)
+	}
+
+	msg.Info("Setting references.")
+
+	// Set reference
+	if err := repo.SetReference(newConf); err != nil {
+		msg.Error("Failed to set references: %s (Skip to cleanup)", err)
+	}
+
+	// VendoredCleanup. This should ONLY be run if UpdateVendored was specified.
+	if installer.UpdateVendored {
+		repo.VendoredCleanup(newConf)
+	}
+}
+
+// LoadLockfile loads the contents of a glide.lock file.
+//
+// TODO: This should go in another package.
+func LoadLockfile(base string, conf *cfg.Config) (*cfg.Lockfile, error) {
+	yml, err := ioutil.ReadFile(filepath.Join(base, gpath.LockFile))
+	if err != nil {
+		return nil, err
+	}
+	lock, err := cfg.LockfileFromYaml(yml)
+	if err != nil {
+		return nil, err
+	}
+
+	hash, err := conf.Hash()
+	if err != nil {
+		return nil, err
+	}
+
+	if hash != lock.Hash {
+		msg.Warn("Lock file may be out of date. Hash check of YAML failed. You may need to run 'update'")
+	}
+
+	return lock, nil
+}
diff --git a/action/list.go b/action/list.go
new file mode 100644
index 0000000..067e3ca
--- /dev/null
+++ b/action/list.go
@@ -0,0 +1,59 @@
+package action
+
+import (
+	"path/filepath"
+	"sort"
+
+	"github.com/Masterminds/glide/dependency"
+	"github.com/Masterminds/glide/msg"
+)
+
+// List lists all of the dependencies of the current project.
+//
+// Params:
+//  - dir (string): basedir
+//  - deep (bool): whether to do a deep scan or a shallow scan
+func List(basedir string, deep bool) {
+
+	basedir, err := filepath.Abs(basedir)
+	if err != nil {
+		msg.Die("Could not read directory: %s", err)
+	}
+
+	r, err := dependency.NewResolver(basedir)
+	if err != nil {
+		msg.Die("Could not create a resolver: %s", err)
+	}
+	h := &dependency.DefaultMissingPackageHandler{Missing: []string{}, Gopath: []string{}}
+	r.Handler = h
+
+	sortable, err := r.ResolveLocal(deep)
+	if err != nil {
+		msg.Die("Error listing dependencies: %s", err)
+	}
+
+	sort.Strings(sortable)
+
+	msg.Puts("INSTALLED packages:")
+	for _, k := range sortable {
+		v, err := filepath.Rel(basedir, k)
+		if err != nil {
+			msg.Warn("Failed to Rel path: %s", err)
+			v = k
+		}
+		msg.Puts("\t%s", v)
+	}
+
+	if len(h.Missing) > 0 {
+		msg.Puts("\nMISSING packages:")
+		for _, pkg := range h.Missing {
+			msg.Puts("\t%s", pkg)
+		}
+	}
+	if len(h.Gopath) > 0 {
+		msg.Puts("\nGOPATH packages:")
+		for _, pkg := range h.Gopath {
+			msg.Puts("\t%s", pkg)
+		}
+	}
+}
diff --git a/action/list_test.go b/action/list_test.go
new file mode 100644
index 0000000..6792210
--- /dev/null
+++ b/action/list_test.go
@@ -0,0 +1,21 @@
+package action
+
+import (
+	"bytes"
+	"testing"
+
+	"github.com/Masterminds/glide/msg"
+)
+
+func TestList(t *testing.T) {
+	var buf bytes.Buffer
+	old := msg.Default.Stdout
+	msg.Default.PanicOnDie = true
+	msg.Default.Stdout = &buf
+	List("../", false)
+	if buf.Len() < 5 {
+		t.Error("Expected some data to be found.")
+	}
+	// TODO: We should capture and test output.
+	msg.Default.Stdout = old
+}
diff --git a/action/name.go b/action/name.go
new file mode 100644
index 0000000..77972b3
--- /dev/null
+++ b/action/name.go
@@ -0,0 +1,11 @@
+package action
+
+import (
+	"github.com/Masterminds/glide/msg"
+)
+
+// Name prints the name of the package, according to the glide.yaml file.
+func Name() {
+	conf := EnsureConfig()
+	msg.Puts(conf.Name)
+}
diff --git a/action/name_test.go b/action/name_test.go
new file mode 100644
index 0000000..244d03b
--- /dev/null
+++ b/action/name_test.go
@@ -0,0 +1,26 @@
+package action
+
+import (
+	"bytes"
+	"os"
+	"testing"
+
+	"github.com/Masterminds/glide/msg"
+)
+
+func TestName(t *testing.T) {
+	var buf bytes.Buffer
+	msg.Default.PanicOnDie = true
+	ostdout := msg.Default.Stdout
+	msg.Default.Stdout = &buf
+	wd, _ := os.Getwd()
+	if err := os.Chdir("../testdata/name"); err != nil {
+		t.Errorf("Failed to change directory: %s", err)
+	}
+	Name()
+	if buf.String() != "technosophos.com/x/foo\n" {
+		t.Errorf("Unexpectedly got name %q", buf.String())
+	}
+	msg.Default.Stdout = ostdout
+	os.Chdir(wd)
+}
diff --git a/action/no_vendor.go b/action/no_vendor.go
new file mode 100644
index 0000000..95a0126
--- /dev/null
+++ b/action/no_vendor.go
@@ -0,0 +1,143 @@
+package action
+
+import (
+	"os"
+	"path/filepath"
+	"strings"
+
+	"github.com/Masterminds/glide/msg"
+)
+
+// NoVendor generates a list of source code directories, excepting `vendor/`.
+//
+// If "onlyGo" is true, only folders that have Go code in them will be returned.
+//
+// If suffix is true, this will append `/...` to every directory.
+func NoVendor(path string, onlyGo, suffix bool) {
+	// This is responsible for printing the results of noVend.
+	paths, err := noVend(path, onlyGo, suffix)
+	if err != nil {
+		msg.Error("Failed to walk file tree: %s", err)
+		msg.Warn("FIXME: NoVendor should exit with non-zero exit code.")
+		return
+	}
+
+	for _, p := range paths {
+		msg.Puts(p)
+	}
+}
+
+// noVend takes a directory and returns a list of Go-like files or directories,
+// provided the directory is not a vendor directory.
+//
+// If onlyGo is true, this will filter out all directories that do not contain
+// ".go" files.
+//
+// TODO: Should we move this to its own package?
+func noVend(path string, onlyGo, suffix bool) ([]string, error) {
+
+	info, err := os.Stat(path)
+	if err != nil {
+		return []string{}, err
+	}
+
+	if !info.IsDir() {
+		return []string{path}, nil
+	}
+
+	res := []string{}
+	f, err := os.Open(path)
+	if err != nil {
+		return res, err
+	}
+
+	fis, err := f.Readdir(0)
+	if err != nil {
+		return res, err
+	}
+
+	cur := false
+
+	for _, fi := range fis {
+		if exclude(fi) {
+			continue
+		}
+
+		full := filepath.Join(path, fi.Name())
+		if fi.IsDir() && !isVend(fi) {
+			p := "./" + full + "/..."
+			res = append(res, p)
+		} else if !fi.IsDir() && isGoish(fi) {
+			//res = append(res, full)
+			cur = true
+		}
+	}
+
+	// Filter out directories that do not contain Go code
+	if onlyGo {
+		res = hasGoSource(res, suffix)
+	}
+
+	if cur {
+		res = append(res, ".")
+	}
+
+	return res, nil
+}
+
+// hasGoSource returns a list of directories that contain Go source.
+func hasGoSource(dirs []string, suffix bool) []string {
+	suf := "/"
+	if suffix {
+		suf = "/..."
+	}
+	buf := []string{}
+	for _, d := range dirs {
+		d := filepath.Dir(d)
+		found := false
+		walker := func(p string, fi os.FileInfo, err error) error {
+			// Dumb optimization
+			if found {
+				return nil
+			}
+
+			// If the file ends with .go, report a match.
+			if strings.ToLower(filepath.Ext(p)) == ".go" {
+				found = true
+			}
+
+			return nil
+		}
+		filepath.Walk(d, walker)
+
+		if found {
+			buf = append(buf, "./"+d+suf)
+		}
+	}
+	return buf
+}
+
+// isVend returns true if this directory is a vendor directory.
+//
+// TODO: Should we return true for Godeps directory?
+func isVend(fi os.FileInfo) bool {
+	return fi.Name() == "vendor"
+}
+
+// exclude returns true if the directory should be excluded by Go toolchain tools.
+//
+// Examples: directories prefixed with '.' or '_'.
+func exclude(fi os.FileInfo) bool {
+	if strings.HasPrefix(fi.Name(), "_") {
+		return true
+	}
+	if strings.HasPrefix(fi.Name(), ".") {
+		return true
+	}
+	return false
+}
+
+// isGoish returns true if the file appears to be Go source.
+func isGoish(fi os.FileInfo) bool {
+	return filepath.Ext(fi.Name()) == ".go"
+}
diff --git a/action/no_vendor_test.go b/action/no_vendor_test.go
new file mode 100644
index 0000000..be6fed3
--- /dev/null
+++ b/action/no_vendor_test.go
@@ -0,0 +1,12 @@
+package action
+
+import (
+	"testing"
+
+	"github.com/Masterminds/glide/msg"
+)
+
+func TestNoVendor(t *testing.T) {
+	msg.Default.PanicOnDie = true
+	NoVendor("../testdata/nv", false, false)
+}
diff --git a/action/plugin.go b/action/plugin.go
new file mode 100644
index 0000000..66da2ad
--- /dev/null
+++ b/action/plugin.go
@@ -0,0 +1,59 @@
+package action
+
+import (
+	"os"
+	"os/exec"
+
+	"github.com/Masterminds/glide/msg"
+)
+
+// Plugin attempts to find and execute a plugin based on a command.
+//
+// Exit code 99 means the plugin was never executed, 98 that it failed to
+// start, and 1 that it exited with an error (see the sketch after this file).
+func Plugin(command string, args []string) {
+
+	cwd, err := os.Getwd()
+	if err != nil {
+		msg.ExitCode(99)
+		msg.Die("Could not get working directory: %s", err)
+	}
+
+	cmd := "glide-" + command
+	var fullcmd string
+	if fullcmd, err = exec.LookPath(cmd); err != nil {
+		fullcmd = cwd + "/" + cmd
+		if _, err := os.Stat(fullcmd); err != nil {
+			msg.ExitCode(99)
+			msg.Die("Command %s does not exist.", cmd)
+		}
+	}
+
+	// Turning os.Args first argument from `glide` to `glide-command`
+	args[0] = cmd
+	// Removing the first argument (command)
+	removed := false
+	for i, v := range args {
+		if removed == false && v == command {
+			args = append(args[:i], args[i+1:]...)
+			removed = true
+		}
+	}
+	pa := os.ProcAttr{
+		Files: []*os.File{os.Stdin, os.Stdout, os.Stderr},
+		Dir:   cwd,
+	}
+
+	msg.Debug("Delegating to plugin %s (%v)\n", fullcmd, args)
+
+	proc, err := os.StartProcess(fullcmd, args, &pa)
+	if err != nil {
+		msg.Error("Failed to execute %s: %s", cmd, err)
+		os.Exit(98)
+	}
+
+	if _, err := proc.Wait(); err != nil {
+		msg.Error(err.Error())
+		os.Exit(1)
+	}
+}
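
To make the plugin contract above concrete: any executable named glide-<command> found on the PATH or in the working directory is started with the remaining arguments and inherits stdin, stdout, and stderr. A hypothetical minimal plugin might look like the following; the name glide-hello is invented for illustration and is not shipped with Glide.

package main

import (
	"fmt"
	"os"
)

func main() {
	// When invoked as `glide hello a b`, os.Args[0] is "glide-hello" and the
	// remaining arguments are passed through unchanged.
	fmt.Printf("hello plugin called with %v\n", os.Args[1:])
	os.Exit(0)
}
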
diff --git a/action/plugin_test.go b/action/plugin_test.go
new file mode 100644
index 0000000..95a7b15
--- /dev/null
+++ b/action/plugin_test.go
@@ -0,0 +1,19 @@
+package action
+
+import (
+	"os"
+	"testing"
+
+	"github.com/Masterminds/glide/msg"
+)
+
+func TestPlugin(t *testing.T) {
+	wd, _ := os.Getwd()
+	os.Chdir("../testdata/plugin")
+	msg.Default.PanicOnDie = true
+	cmd := "hello"
+	args := []string{"a", "b"}
+	// FIXME: Trapping the panic is the nice thing to do.
+	Plugin(cmd, args)
+	os.Chdir(wd)
+}
diff --git a/action/rebuild.go b/action/rebuild.go
new file mode 100644
index 0000000..d53b774
--- /dev/null
+++ b/action/rebuild.go
@@ -0,0 +1,98 @@
+package action
+
+import (
+	"os"
+	"os/exec"
+	"path"
+	"path/filepath"
+	"strings"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+)
+
+// Rebuild rebuilds '.a' files for a project.
+//
+// Prior to Go 1.4, this could substantially reduce time on incremental compiles.
+// It remains to be seen whether this is tremendously beneficial to modern Go
+// programs.
+func Rebuild() {
+	conf := EnsureConfig()
+	vpath, err := gpath.Vendor()
+	if err != nil {
+		msg.Die("Could not get vendor path: %s", err)
+	}
+
+	msg.Info("Building dependencies.\n")
+
+	if len(conf.Imports) == 0 {
+		msg.Info("No dependencies found. Nothing built.\n")
+		return
+	}
+
+	for _, dep := range conf.Imports {
+		if err := buildDep(dep, vpath); err != nil {
+			msg.Warn("Failed to build %s: %s\n", dep.Name, err)
+		}
+	}
+}
+
+func buildDep(dep *cfg.Dependency, vpath string) error {
+	if len(dep.Subpackages) == 0 {
+		buildPath(dep.Name)
+	}
+
+	for _, pkg := range dep.Subpackages {
+		if pkg == "**" || pkg == "..." {
+			//Info("Building all packages in %s\n", dep.Name)
+			buildPath(path.Join(dep.Name, "..."))
+		} else {
+			paths, err := resolvePackages(vpath, dep.Name, pkg)
+			if err != nil {
+				msg.Warn("Error resolving packages: %s", err)
+			}
+			buildPaths(paths)
+		}
+	}
+
+	return nil
+}
+
+func resolvePackages(vpath, pkg, subpkg string) ([]string, error) {
+	sdir, _ := os.Getwd()
+	if err := os.Chdir(filepath.Join(vpath, pkg, subpkg)); err != nil {
+		return []string{}, err
+	}
+	defer os.Chdir(sdir)
+	p, err := filepath.Glob(path.Join(vpath, pkg, subpkg))
+	if err != nil {
+		return []string{}, err
+	}
+	for k, v := range p {
+		nv := strings.TrimPrefix(v, vpath)
+		p[k] = strings.TrimPrefix(nv, string(filepath.Separator))
+	}
+	return p, nil
+}
+
+func buildPaths(paths []string) error {
+	for _, path := range paths {
+		if err := buildPath(path); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func buildPath(path string) error {
+	msg.Info("Running go install %s\n", path)
+	// filepath.Join strips the leading ".", so "./" is prepended separately (see the sketch after this file).
+	p := "." + string(filepath.Separator) + filepath.Join("vendor", path)
+	out, err := exec.Command("go", "install", p).CombinedOutput()
+	if err != nil {
+		msg.Warn("Failed to run 'go install' for %s: %s", path, string(out))
+	}
+	return err
+}
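
As a side note on the separator handling in buildPath above, a small standard-library-only sketch of why the leading "./" is prepended by hand rather than passed through filepath.Join:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// filepath.Join cleans its result, so the "." disappears and the go tool
	// would interpret the argument as an import path instead of a local path.
	fmt.Println(filepath.Join(".", "vendor", "github.com/example/pkg"))
	// Prints "vendor/github.com/example/pkg" on a Unix-like system.

	// Prepending the separator manually keeps the relative form go install expects.
	fmt.Println("." + string(filepath.Separator) + filepath.Join("vendor", "github.com/example/pkg"))
	// Prints "./vendor/github.com/example/pkg" on a Unix-like system.
}
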
diff --git a/action/rebuild_test.go b/action/rebuild_test.go
new file mode 100644
index 0000000..dfcdf0e
--- /dev/null
+++ b/action/rebuild_test.go
@@ -0,0 +1,18 @@
+package action
+
+import (
+	"os"
+	"testing"
+
+	"github.com/Masterminds/glide/msg"
+)
+
+func TestRebuild(t *testing.T) {
+	msg.Default.PanicOnDie = true
+	wd, _ := os.Getwd()
+	if err := os.Chdir("../testdata/rebuild"); err != nil {
+		t.Errorf("Could not change dir: %s (%s)", err, wd)
+	}
+	Rebuild()
+	os.Chdir(wd)
+}
diff --git a/action/remove.go b/action/remove.go
new file mode 100644
index 0000000..e0978b0
--- /dev/null
+++ b/action/remove.go
@@ -0,0 +1,66 @@
+package action
+
+import (
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+	"github.com/Masterminds/glide/repo"
+)
+
+func Remove(packages []string, inst *repo.Installer) {
+	base := gpath.Basepath()
+	EnsureGopath()
+	EnsureVendorDir()
+	conf := EnsureConfig()
+	glidefile, err := gpath.Glide()
+	if err != nil {
+		msg.Die("Could not find Glide file: %s", err)
+	}
+
+	msg.Info("Preparing to remove %d packages.", len(packages))
+	conf.Imports = rmDeps(packages, conf.Imports)
+	conf.DevImports = rmDeps(packages, conf.DevImports)
+
+	// Copy used to generate locks.
+	confcopy := conf.Clone()
+
+	confcopy.Imports = inst.List(confcopy)
+
+	if err := repo.SetReference(confcopy); err != nil {
+		msg.Error("Failed to set references: %s", err)
+	}
+
+	// TODO: Right now, there is no flag to enable this, so this will never be
+	// run. I am not sure whether we should allow this in a rm op or not.
+	if inst.UpdateVendored {
+		repo.VendoredCleanup(confcopy)
+	}
+
+	// Write glide.yaml
+	if err := conf.WriteFile(glidefile); err != nil {
+		msg.Die("Failed to write glide YAML file: %s", err)
+	}
+
+	// Write glide lock
+	writeLock(conf, confcopy, base)
+}
+
+// rmDeps returns a list of dependencies that do not contain the given pkgs.
+//
+// It generates neither an error nor a warning for a pkg that does not exist
+// in the list of deps.
+func rmDeps(pkgs []string, deps []*cfg.Dependency) []*cfg.Dependency {
+	res := []*cfg.Dependency{}
+	for _, d := range deps {
+		rem := false
+		for _, p := range pkgs {
+			if p == d.Name {
+				rem = true
+			}
+		}
+		if !rem {
+			res = append(res, d)
+		}
+	}
+	return res
+}
diff --git a/action/tree.go b/action/tree.go
new file mode 100644
index 0000000..a43a6ba
--- /dev/null
+++ b/action/tree.go
@@ -0,0 +1,32 @@
+package action
+
+import (
+	"container/list"
+	"os"
+
+	"github.com/Masterminds/glide/msg"
+	"github.com/Masterminds/glide/tree"
+	"github.com/Masterminds/glide/util"
+)
+
+// Tree prints a tree representing dependencies.
+func Tree(basedir string, showcore bool) {
+	buildContext, err := util.GetBuildContext()
+	if err != nil {
+		msg.Die("Failed to get a build context: %s", err)
+	}
+	myName := buildContext.PackageName(basedir)
+
+	if basedir == "." {
+		var err error
+		basedir, err = os.Getwd()
+		if err != nil {
+			msg.Die("Could not get working directory")
+		}
+	}
+
+	msg.Puts(myName)
+	l := list.New()
+	l.PushBack(myName)
+	tree.Display(buildContext, basedir, myName, 1, showcore, l)
+}
diff --git a/action/update.go b/action/update.go
new file mode 100644
index 0000000..c989e37
--- /dev/null
+++ b/action/update.go
@@ -0,0 +1,92 @@
+package action
+
+import (
+	"path/filepath"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/dependency"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+	"github.com/Masterminds/glide/repo"
+)
+
+// Update updates repos and the lock file from the main glide yaml.
+func Update(installer *repo.Installer, skipRecursive bool) {
+	base := "."
+	EnsureGopath()
+	EnsureVendorDir()
+	conf := EnsureConfig()
+
+	installer.RootPackage = conf.Name
+
+	// Delete unused packages
+	if installer.DeleteUnused {
+		dependency.DeleteUnused(conf)
+	}
+
+	// Try to check out the initial dependencies.
+	if err := installer.Checkout(conf, false); err != nil {
+		msg.Die("Failed to do initial checkout of config: %s", err)
+	}
+
+	// Set the versions for the initial dependencies so that resolved dependencies
+	// are rooted in the correct version of the base.
+	if err := repo.SetReference(conf); err != nil {
+		msg.Die("Failed to set initial config references: %s", err)
+	}
+
+	// Prior to resolving dependencies we need to start working with a clone
+	// of the conf because we'll be making real changes to it.
+	confcopy := conf.Clone()
+
+	if !skipRecursive {
+		// Get all repos and update them.
+		err := installer.Update(confcopy)
+		if err != nil {
+			msg.Die("Could not update packages: %s", err)
+		}
+
+		// TODO: There is no support here for importing Godeps, GPM, and GB files.
+		// I think that all we really need to do now is hunt for these files, and then
+		// roll their version numbers into the config file.
+
+		// Set references. There may be no remaining references to set since the
+		// installer set them as it went to make sure it parsed the right imports
+		// from the right version of the package.
+		msg.Info("Setting references for remaining imports")
+		if err := repo.SetReference(confcopy); err != nil {
+			msg.Error("Failed to set references: %s (Skip to cleanup)", err)
+		}
+	}
+	// Vendored cleanup
+	// VendoredCleanup. This should ONLY be run if UpdateVendored was specified.
+	if installer.UpdateVendored {
+		repo.VendoredCleanup(confcopy)
+	}
+
+	// Write glide.yaml (Why? Godeps/GPM/GB?)
+	// I think we don't need to write a new Glide file because update should not
+	// change anything important. It will just generate information about
+	// transitive dependencies, all of which belong exclusively in the lock
+	// file, not the glide.yaml file.
+	// TODO(mattfarina): Detect when a new dependency has been added or removed
+	// from the project. A removed dependency should warn and an added dependency
+	// should be added to the glide.yaml file. See issue #193.
+
+	if !skipRecursive {
+		// Write lock
+		hash, err := conf.Hash()
+		if err != nil {
+			msg.Die("Failed to generate config hash. Unable to generate lock file.")
+		}
+		lock := cfg.NewLockfile(confcopy.Imports, hash)
+		if err := lock.WriteFile(filepath.Join(base, gpath.LockFile)); err != nil {
+			msg.Error("Could not write lock file to %s: %s", base, err)
+			return
+		}
+
+		msg.Info("Project relies on %d dependencies.", len(confcopy.Imports))
+	} else {
+		msg.Warn("Skipping lockfile generation because full dependency tree is not being calculated")
+	}
+}
diff --git a/cfg/config.go b/cfg/config.go
index fb62534..2aea716 100644
--- a/cfg/config.go
+++ b/cfg/config.go
@@ -3,6 +3,7 @@
 import (
 	"crypto/sha256"
 	"fmt"
+	"io/ioutil"
 	"reflect"
 	"strings"
 
@@ -49,7 +50,7 @@
 	DevImports Dependencies `yaml:"devimport,omitempty"`
 }
 
-// A transitive representation of a dependency for importing and exploting to yaml.
+// A transitive representation of a dependency for importing and exporting to yaml.
 type cf struct {
 	Name        string       `yaml:"package"`
 	Description string       `yaml:"description,omitempty"`
@@ -142,7 +143,10 @@
 // HasIgnore returns true if the given name is listed on the ignore list.
 func (c *Config) HasIgnore(name string) bool {
 	for _, v := range c.Ignore {
-		if v == name {
+
+		// Check for both a name and to make sure sub-packages are ignored as
+		// well.
+		if v == name || strings.HasPrefix(name, v+"/") {
 			return true
 		}
 	}
@@ -164,6 +168,18 @@
 	return n
 }
 
+// WriteFile writes a Glide YAML file.
+//
+// This is a convenience function that marshals the YAML and then writes it to
+// the given file. If the file exists, it will be clobbered.
+func (c *Config) WriteFile(glidepath string) error {
+	o, err := c.Marshal()
+	if err != nil {
+		return err
+	}
+	return ioutil.WriteFile(glidepath, o, 0666)
+}
+
 // DeDupe consolidates duplicate dependencies on a Config instance
 func (c *Config) DeDupe() error {
 
@@ -225,6 +241,18 @@
 	return nil
 }
 
+// AddImport appends dependencies to the import list, deduplicating as we go.
+func (c *Config) AddImport(deps ...*Dependency) error {
+	t := c.Imports
+	t = append(t, deps...)
+	t, err := t.DeDupe()
+	if err != nil {
+		return err
+	}
+	c.Imports = t
+	return nil
+}
+
 // Hash generates a sha256 hash for a given Config
 func (c *Config) Hash() (string, error) {
 	yml, err := c.Marshal()
@@ -252,7 +280,7 @@
 
 // Clone performs a deep clone of Dependencies
 func (d Dependencies) Clone() Dependencies {
-	n := make(Dependencies, 0, 1)
+	n := make(Dependencies, 0, len(d))
 	for _, v := range d {
 		n = append(n, v.Clone())
 	}
@@ -414,6 +442,18 @@
 	}
 }
 
+// HasSubpackage returns true if the given subpackage is present on the dependency.
+func (d *Dependency) HasSubpackage(sub string) bool {
+
+	for _, v := range d.Subpackages {
+		if sub == v {
+			return true
+		}
+	}
+
+	return false
+}
+
 // Owners is a list of owners for a project.
 type Owners []*Owner
 
diff --git a/cfg/lock.go b/cfg/lock.go
index 921fe33..bbb959f 100644
--- a/cfg/lock.go
+++ b/cfg/lock.go
@@ -1,6 +1,7 @@
 package cfg
 
 import (
+	"io/ioutil"
 	"sort"
 	"strings"
 	"time"
@@ -32,6 +33,18 @@
 	return yml, nil
 }
 
+// WriteFile writes a Glide lock file.
+//
+// This is a convenience function that marshals the YAML and then writes it to
+// the given file. If the file exists, it will be clobbered.
+func (lf *Lockfile) WriteFile(lockpath string) error {
+	o, err := lf.Marshal()
+	if err != nil {
+		return err
+	}
+	return ioutil.WriteFile(lockpath, o, 0666)
+}
+
 type Locks []*Lock
 
 // Len returns the length of the Locks. This is needed for sorting with
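
The WriteFile helpers added to cfg/config.go and cfg/lock.go above, together with AddImport, make the cfg package usable as a small standalone library. A minimal usage sketch, with error handling shortened and the file path assumed for illustration:

package main

import (
	"io/ioutil"
	"log"

	"github.com/Masterminds/glide/cfg"
)

func main() {
	yml, err := ioutil.ReadFile("glide.yaml")
	if err != nil {
		log.Fatal(err)
	}
	conf, err := cfg.ConfigFromYaml(yml)
	if err != nil {
		log.Fatal(err)
	}

	// AddImport de-duplicates as it appends to conf.Imports.
	if err := conf.AddImport(&cfg.Dependency{Name: "github.com/Masterminds/semver"}); err != nil {
		log.Fatal(err)
	}

	// WriteFile marshals the config and clobbers any existing file at the path.
	if err := conf.WriteFile("glide.yaml"); err != nil {
		log.Fatal(err)
	}
}
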
diff --git a/cmd/drop_shell.go b/cmd/drop_shell.go
deleted file mode 100644
index ef65231..0000000
--- a/cmd/drop_shell.go
+++ /dev/null
@@ -1,69 +0,0 @@
-package cmd
-
-import (
-	"fmt"
-	"os"
-	"os/exec"
-
-	"github.com/Masterminds/cookoo"
-)
-
-// DropToShell executes a glide plugin. A command that's implemented by
-// another application is executed in a similar manner to the way git commands
-// work. For example, 'glide foo' would try to execute the application glide-foo.
-// Params:
-//   - command: the name of the command to attempt executing.
-func DropToShell(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	args := c.Get("os.Args", nil).([]string)
-	command := p.Get("command", "").(string)
-
-	if len(args) == 0 {
-		return nil, fmt.Errorf("Could not get os.Args.")
-	}
-
-	cwd, err := os.Getwd()
-	if err != nil {
-		return nil, err
-	}
-
-	projpath := cwd
-	if tmp := os.Getenv("GLIDE_PROJECT"); len(tmp) != 0 {
-		projpath = tmp
-	}
-
-	cmd := "glide-" + command
-	var fullcmd string
-	if fullcmd, err = exec.LookPath(cmd); err != nil {
-		fullcmd = projpath + "/" + cmd
-		if _, err := os.Stat(fullcmd); err != nil {
-			return nil, fmt.Errorf("Command %s does not exist.", cmd)
-		}
-	}
-
-	// Turning os.Args first argument from `glide` to `glide-command`
-	args[0] = cmd
-	// Removing the first argument (command)
-	removed := false
-	for i, v := range args {
-		if removed == false && v == command {
-			args = append(args[:i], args[i+1:]...)
-			removed = true
-		}
-	}
-	pa := os.ProcAttr{
-		Files: []*os.File{os.Stdin, os.Stdout, os.Stderr},
-		Dir:   cwd,
-	}
-
-	fmt.Printf("Delegating to plugin %s (%v)\n", fullcmd, args)
-
-	proc, err := os.StartProcess(fullcmd, args, &pa)
-	if err != nil {
-		return nil, err
-	}
-
-	if _, err := proc.Wait(); err != nil {
-		return nil, err
-	}
-	return nil, nil
-}
diff --git a/cmd/flatten.go b/cmd/flatten.go
deleted file mode 100644
index 9ae94f7..0000000
--- a/cmd/flatten.go
+++ /dev/null
@@ -1,479 +0,0 @@
-package cmd
-
-import (
-	"io/ioutil"
-	"os"
-	"path"
-	"path/filepath"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-	"github.com/Masterminds/glide/util"
-	"github.com/Masterminds/semver"
-)
-
-// Flatten recurses through all dependent packages and flattens to a top level.
-//
-// Flattening involves determining a tree's dependencies and flattening them
-// into a single large list.
-//
-// Params:
-//	- packages ([]string): The packages to read. If this is empty, it reads all
-//		packages.
-//	- force (bool): force vcs updates.
-//	- conf (*cfg.Config): The configuration.
-//
-// Returns:
-//
-func Flatten(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	conf := p.Get("conf", &cfg.Config{}).(*cfg.Config)
-	skip := p.Get("skip", false).(bool)
-	home := p.Get("home", "").(string)
-	cache := p.Get("cache", false).(bool)
-	cacheGopath := p.Get("cacheGopath", false).(bool)
-	useGopath := p.Get("useGopath", false).(bool)
-
-	if skip {
-		Warn("Skipping lockfile generation because full dependency tree is not being calculated")
-		return conf, nil
-	}
-	packages := p.Get("packages", []string{}).([]string)
-
-	// Operate on a clone of the conf so any changes don't impact later operations.
-	// This is a deep clone so dependencies are also cloned.
-	confcopy := conf.Clone()
-
-	// Generate a hash of the conf for later use in lockfile generation.
-	hash, err := conf.Hash()
-	if err != nil {
-		return conf, err
-	}
-
-	// When packages are passed around with a #version on the end it needs
-	// to be stripped.
-	for k, v := range packages {
-		parts := strings.Split(v, "#")
-		packages[k] = parts[0]
-	}
-
-	force := p.Get("force", true).(bool)
-	vend, _ := VendorPath(c)
-
-	// If no packages are supplied, we do them all.
-	if len(packages) == 0 {
-		packages = make([]string, len(confcopy.Imports))
-		for i, v := range confcopy.Imports {
-			packages[i] = v.Name
-		}
-	}
-
-	// Build an initial dependency map.
-	deps := make(map[string]*cfg.Dependency, len(confcopy.Imports))
-	for _, imp := range confcopy.Imports {
-		deps[imp.Name] = imp
-	}
-
-	f := &flattening{confcopy, vend, vend, deps, packages}
-
-	// The assumption here is that once something has been scanned once in a
-	// run, there is no need to scan it again.
-	scanned := map[string]bool{}
-	err = recFlatten(f, force, home, cache, cacheGopath, useGopath, scanned)
-	if err != nil {
-		return confcopy, err
-	}
-	err = confcopy.DeDupe()
-	if err != nil {
-		return confcopy, err
-	}
-	flattenSetRefs(f)
-	Info("Project relies on %d dependencies.", len(deps))
-
-	c.Put("Lockfile", cfg.LockfileFromMap(deps, hash))
-
-	exportFlattenedDeps(confcopy, deps)
-
-	return confcopy, err
-}
-
-func exportFlattenedDeps(conf *cfg.Config, in map[string]*cfg.Dependency) {
-	out := make([]*cfg.Dependency, len(in))
-	i := 0
-	for _, v := range in {
-		out[i] = v
-		i++
-	}
-	conf.Imports = out
-}
-
-type flattening struct {
-	conf *cfg.Config
-	// Top vendor path, e.g. project/vendor
-	top string
-	// Current path
-	curr string
-	// Built list of dependencies
-	deps map[string]*cfg.Dependency
-	// Dependencies that need to be scanned.
-	scan []string
-}
-
-// Hack: Cache record of updates so we don't have to keep doing git pulls.
-var updateCache = map[string]bool{}
-
-// refFlatten recursively flattens the vendor tree.
-func recFlatten(f *flattening, force bool, home string, cache, cacheGopath, useGopath bool, scanned map[string]bool) error {
-	Debug("---> Inspecting %s for changes (%d packages).\n", f.curr, len(f.scan))
-	for _, imp := range f.scan {
-		Debug("----> Scanning %s", imp)
-		base := filepath.Join(f.top, filepath.FromSlash(imp))
-		mod := []string{}
-		if m, ok := mergeGlide(base, imp, f); ok {
-			mod = m
-		} else if m, ok = mergeGodep(base, imp, f); ok {
-			mod = m
-		} else if m, ok = mergeGPM(base, imp, f); ok {
-			mod = m
-		} else if m, ok = mergeGb(base, imp, f); ok {
-			mod = m
-		} else if m, ok = mergeGuess(base, imp, f, scanned); ok {
-			mod = m
-		}
-
-		if len(mod) > 0 {
-			Debug("----> Updating all dependencies for %q (%d)", imp, len(mod))
-			flattenGlideUp(f, base, home, force, cache, cacheGopath, useGopath)
-			f2 := &flattening{
-				conf: f.conf,
-				top:  f.top,
-				curr: base,
-				deps: f.deps,
-				scan: mod}
-			recFlatten(f2, force, home, cache, cacheGopath, useGopath, scanned)
-		}
-	}
-
-	return nil
-}
-
-// flattenGlideUp does a glide update in the middle of a flatten operation.
-//
-// While this is expensive, it is also necessary to make sure we have the
-// correct version of all dependencies. We might be able to simplify by
-// marking packages dirty when they are added.
-func flattenGlideUp(f *flattening, base, home string, force, cache, cacheGopath, useGopath bool) error {
-	//vdir := path.Join(base, "vendor")
-	for _, imp := range f.deps {
-		// If the top package name in the glide.yaml file is present in the deps
-		// skip it because we already have it.
-		if imp.Name == f.conf.Name {
-			continue
-		}
-		wd := filepath.Join(f.top, filepath.FromSlash(imp.Name))
-
-		if updateCache[imp.Name] {
-			Debug("----> Already updated %s", imp.Name)
-			continue
-		}
-
-		Debug("Getting project %s (%s)\n", imp.Name, wd)
-
-		if err := VcsUpdate(imp, f.top, home, force, cache, cacheGopath, useGopath); err != nil {
-			// We can still go on just fine even if this fails.
-			Warn("Skipped getting %s: %s\n", imp.Name, err)
-			continue
-		}
-		updateCache[imp.Name] = true
-
-		// If a revision has been set use it.
-		err := VcsVersion(imp, f.top)
-		if err != nil {
-			Warn("Problem setting version on %s: %s\n", imp.Name, err)
-		}
-	}
-
-	return nil
-}
-
-// Set the references for all packages after a flatten is completed.
-func flattenSetRefs(f *flattening) {
-	Debug("Setting final version for %d dependencies.", len(f.deps))
-	for _, imp := range f.deps {
-		if err := VcsVersion(imp, f.top); err != nil {
-			Warn("Problem setting version on %s: %s (flatten)\n", imp.Name, err)
-		}
-	}
-}
-
-func mergeGlide(dir, name string, f *flattening) ([]string, bool) {
-	deps := f.deps
-	vend := f.top
-	gp := path.Join(dir, "glide.yaml")
-	if _, err := os.Stat(gp); err != nil {
-		return []string{}, false
-	}
-
-	yml, err := ioutil.ReadFile(gp)
-	if err != nil {
-		Warn("Found glide file %q, but can't read: %s", gp, err)
-		return []string{}, false
-	}
-
-	conf, err := cfg.ConfigFromYaml(yml)
-	if err != nil {
-		Warn("Found glide file %q, but can't use it: %s", gp, err)
-		return []string{}, false
-	}
-
-	Info("Found glide.yaml in %s", gp)
-
-	return mergeDeps(deps, conf.Imports, vend, f), true
-}
-
-// listGodep appends Godeps entries to the deps.
-//
-// It returns true if any dependencies were found (even if not added because
-// they are duplicates).
-func mergeGodep(dir, name string, f *flattening) ([]string, bool) {
-	deps := f.deps
-	vend := f.top
-	Debug("Looking in %s/Godeps/ for a Godeps.json file.\n", dir)
-	d, err := parseGodepGodeps(dir)
-	if err != nil {
-		Warn("Looking for Godeps: %s\n", err)
-		return []string{}, false
-	} else if len(d) == 0 {
-		return []string{}, false
-	}
-
-	Info("Found Godeps.json file for %q", name)
-	return mergeDeps(deps, d, vend, f), true
-}
-
-// listGb merges GB dependencies into the deps.
-func mergeGb(dir, pkg string, f *flattening) ([]string, bool) {
-	deps := f.deps
-	vend := f.top
-	Debug("Looking in %s/vendor/ for a manifest file.\n", dir)
-	d, err := parseGbManifest(dir)
-	if err != nil || len(d) == 0 {
-		return []string{}, false
-	}
-	Info("Found gb manifest file for %q", pkg)
-	return mergeDeps(deps, d, vend, f), true
-}
-
-// mergeGPM merges GPM Godeps files into deps.
-func mergeGPM(dir, pkg string, f *flattening) ([]string, bool) {
-	deps := f.deps
-	vend := f.top
-	d, err := parseGPMGodeps(dir)
-	if err != nil || len(d) == 0 {
-		return []string{}, false
-	}
-	Info("Found GPM file for %q", pkg)
-	return mergeDeps(deps, d, vend, f), true
-}
-
-// mergeGuess guesses dependencies and merges.
-//
-// This always returns true because it always handles the job of searching
-// for dependencies. So generally it should be the last merge strategy
-// that you try.
-func mergeGuess(dir, pkg string, f *flattening, scanned map[string]bool) ([]string, bool) {
-	deps := f.deps
-	Info("Scanning %s for dependencies.", pkg)
-	buildContext, err := util.GetBuildContext()
-	if err != nil {
-		Warn("Could not scan package %q: %s", pkg, err)
-		return []string{}, false
-	}
-
-	res := []string{}
-
-	if _, err := os.Stat(dir); err != nil {
-		Warn("Directory is missing: %s", dir)
-		return res, true
-	}
-
-	d := walkDeps(buildContext, dir, pkg)
-	for _, oname := range d {
-		if _, ok := scanned[oname]; ok {
-			//Info("===> Scanned %s already. Skipping", name)
-			continue
-		}
-		Debug("=> Scanning %s", oname)
-		name, _ := NormalizeName(oname)
-		//if _, ok := deps[name]; ok {
-		//scanned[oname] = true
-		//Debug("====> Seen %s already. Skipping", name)
-		//continue
-		//}
-		if f.conf.HasIgnore(name) {
-			Debug("==> Skipping %s because it is on the ignore list", name)
-			continue
-		}
-
-		found := findPkg(buildContext, name, dir)
-		switch found.PType {
-		case ptypeUnknown:
-			Info("==> Unknown %s (%s)", name, oname)
-			Debug("✨☆ Undownloaded dependency: %s", name)
-			repo := util.GetRootFromPackage(name)
-			nd := &cfg.Dependency{
-				Name:       name,
-				Repository: "https://" + repo,
-			}
-			deps[name] = nd
-			res = append(res, name)
-		case ptypeGoroot, ptypeCgo:
-			scanned[oname] = true
-			// Why do we break rather than continue?
-			break
-		default:
-			// We're looking for dependencies that might exist in $GOPATH
-			// but not in vendor/. We add any that are on $GOPATH.
-			if _, ok := deps[name]; !ok {
-				Debug("✨☆ GOPATH dependency: %s", name)
-				nd := &cfg.Dependency{Name: name}
-				deps[name] = nd
-				res = append(res, name)
-			}
-			scanned[oname] = true
-		}
-	}
-
-	return res, true
-}
-
-// mergeDeps merges any dependency array into deps.
-func mergeDeps(orig map[string]*cfg.Dependency, add []*cfg.Dependency, vend string, f *flattening) []string {
-	mod := []string{}
-	for _, dd := range add {
-		if f.conf.HasIgnore(dd.Name) {
-			Debug("Skipping %s because it is on the ignore list", dd.Name)
-		} else if existing, ok := orig[dd.Name]; !ok {
-			// Add it unless it's already there.
-			orig[dd.Name] = dd
-			Debug("Adding %s to the scan list", dd.Name)
-			mod = append(mod, dd.Name)
-		} else if existing.Reference == "" && dd.Reference != "" {
-			// If a nested dep pins a reference and the outer one does not,
-			// set the reference.
-			existing.Reference = dd.Reference
-			mod = append(mod, dd.Name)
-		} else if dd.Reference != "" && existing.Reference != "" && dd.Reference != existing.Reference {
-			// Check if one is a version and the other is a constraint. If the
-			// version is in the constraint use that.
-			dest := filepath.Join(vend, filepath.FromSlash(dd.Name))
-			repo, err := existing.GetRepo(dest)
-			if err != nil {
-				Warn("Unable to access repo for %s\n", existing.Name)
-				Info("Keeping %s %s", existing.Name, existing.Reference)
-				continue
-			}
-
-			eIsRef := repo.IsReference(existing.Reference)
-			ddIsRef := repo.IsReference(dd.Reference)
-
-			// Both are references and different ones.
-			if eIsRef && ddIsRef {
-				Warn("Conflict: %s ref is %s, but also asked for %s\n", existing.Name, existing.Reference, dd.Reference)
-				Info("Keeping %s %s", existing.Name, existing.Reference)
-			} else if eIsRef {
-				// Test ddIsRef is a constraint and if eIsRef is a semver
-				// within that
-				con, err := semver.NewConstraint(dd.Reference)
-				if err != nil {
-					Warn("Version issue for %s: '%s' is neither a reference nor a semantic version constraint\n", dd.Name, dd.Reference)
-					Info("Keeping %s %s", existing.Name, existing.Reference)
-					continue
-				}
-
-				ver, err := semver.NewVersion(existing.Reference)
-				if err != nil {
-					// The existing version is not a semantic version.
-					Warn("Conflict: %s version is %s, but also asked for %s\n", existing.Name, existing.Reference, dd.Reference)
-					Info("Keeping %s %s", existing.Name, existing.Reference)
-					continue
-				}
-
-				if con.Check(ver) {
-					Info("Keeping %s %s because it fits constraint '%s'", existing.Name, existing.Reference, dd.Reference)
-				} else {
-					Warn("Conflict: %s version is %s but does not meet constraint '%s'\n", existing.Name, existing.Reference, dd.Reference)
-					Info("Keeping %s %s", existing.Name, existing.Reference)
-				}
-
-			} else if ddIsRef {
-				// Test eIsRef is a constraint and if ddIsRef is a semver
-				// within that
-				con, err := semver.NewConstraint(existing.Reference)
-				if err != nil {
-					Warn("Version issue for %s: '%s' is neither a reference nor a semantic version constraint\n", existing.Name, existing.Reference)
-					Info("Keeping %s %s", existing.Name, existing.Reference)
-					continue
-				}
-
-				ver, err := semver.NewVersion(dd.Reference)
-				if err != nil {
-					// The dd version is not a semantic version.
-					Warn("Conflict: %s version is %s, but also asked for %s\n", existing.Name, existing.Reference, dd.Reference)
-					Info("Keeping %s %s", existing.Name, existing.Reference)
-					continue
-				}
-
-				if con.Check(ver) {
-					// Use the specific version if noted instead of the existing
-					// constraint.
-					Info("Using %s %s because it fits constraint '%s'", existing.Name, dd.Reference, existing.Reference)
-					existing.Reference = dd.Reference
-					mod = append(mod, dd.Name)
-				} else {
-					Warn("Conflict: %s semantic version constraint is %s but '%s' does not meet the constraint\n", existing.Name, existing.Reference, dd.Reference)
-					Info("Keeping %s %s", existing.Name, existing.Reference)
-				}
-			} else {
-				// Neither is a vcs reference and both could be semantic version
-				// constraints that are different.
-
-				_, err := semver.NewConstraint(dd.Reference)
-				if err != nil {
-					// dd.Reference is not a reference or a valid constraint.
-					Warn("Version %s %s is not a reference or valid semantic version constraint\n", dd.Name, dd.Reference)
-					Info("Keeping %s %s", existing.Name, existing.Reference)
-					continue
-				}
-
-				_, err = semver.NewConstraint(existing.Reference)
-				if err != nil {
-					// existing.Reference is not a reference or a valid constraint.
-					// We really should never end up here.
-					Warn("Version %s %s is not a reference or valid semantic version constraint\n", existing.Name, existing.Reference)
-
-					existing.Reference = dd.Reference
-					mod = append(mod, dd.Name)
-					Info("Using %s %s because it is a valid version", existing.Name, existing.Reference)
-					continue
-				}
-
-				// Both versions are constraints. Try to merge them.
-				// If either comparison has an || skip merging. That's complicated.
-				ddor := strings.Index(dd.Reference, "||")
-				eor := strings.Index(existing.Reference, "||")
-				if ddor == -1 && eor == -1 {
-					// Add the comparisons together.
-					newRef := existing.Reference + ", " + dd.Reference
-					Info("Combining %s semantic version constraints %s and %s", existing.Name, existing.Reference, dd.Reference)
-					existing.Reference = newRef
-					mod = append(mod, dd.Name)
-				} else {
-					Warn("Conflict: %s version is %s, but also asked for %s\n", existing.Name, existing.Reference, dd.Reference)
-					Info("Keeping %s %s", existing.Name, existing.Reference)
-				}
-			}
-		}
-	}
-	return mod
-}
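The constraint reconciliation in mergeDeps above leans on the Masterminds/semver package this file already imports (NewConstraint, NewVersion and Check). A minimal sketch of that check in isolation; the constraint and version strings are illustrative, not taken from Glide:

	package main

	import (
		"fmt"

		"github.com/Masterminds/semver"
	)

	func main() {
		// Parse a range constraint and a concrete version (illustrative values).
		con, err := semver.NewConstraint(">= 1.2.0, < 2.0.0")
		if err != nil {
			panic(err)
		}
		ver, err := semver.NewVersion("1.4.3")
		if err != nil {
			panic(err)
		}
		// Check reports whether the version satisfies the constraint; mergeDeps
		// uses the same call to decide whether to keep or swap a reference.
		fmt.Println(con.Check(ver)) // prints: true
	}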
diff --git a/cmd/gb.go b/cmd/gb.go
deleted file mode 100644
index e74e6b6..0000000
--- a/cmd/gb.go
+++ /dev/null
@@ -1,79 +0,0 @@
-package cmd
-
-import (
-	"encoding/json"
-	"os"
-	"path/filepath"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-	"github.com/Masterminds/glide/gb"
-)
-
-func HasGbManifest(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	dir := cookoo.GetString("dir", "", p)
-	path := filepath.Join(dir, "vendor", "manifest")
-	_, err := os.Stat(path)
-	return err == nil, nil
-}
-
-// GbManifest
-//
-// Params:
-// 	- dir (string): The directory where the manifest file is located.
-// Returns:
-//
-func GbManifest(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	dir := cookoo.GetString("dir", ".", p)
-	return parseGbManifest(dir)
-}
-
-func parseGbManifest(dir string) ([]*cfg.Dependency, error) {
-	path := filepath.Join(dir, "vendor", "manifest")
-	if fi, err := os.Stat(path); err != nil || fi.IsDir() {
-		return []*cfg.Dependency{}, nil
-	}
-
-	Info("Found GB manifest file.\n")
-	buf := []*cfg.Dependency{}
-	file, err := os.Open(path)
-	if err != nil {
-		return buf, err
-	}
-	defer file.Close()
-
-	man := gb.Manifest{}
-
-	dec := json.NewDecoder(file)
-	if err := dec.Decode(&man); err != nil {
-		return buf, err
-	}
-
-	seen := map[string]bool{}
-
-	for _, d := range man.Dependencies {
-		pkg, sub := NormalizeName(d.Importpath)
-		if _, ok := seen[pkg]; ok {
-			if len(sub) == 0 {
-				continue
-			}
-			for _, dep := range buf {
-				if dep.Name == pkg {
-					dep.Subpackages = append(dep.Subpackages, sub)
-				}
-			}
-		} else {
-			seen[pkg] = true
-			dep := &cfg.Dependency{
-				Name:       pkg,
-				Reference:  d.Revision,
-				Repository: d.Repository,
-			}
-			if len(sub) > 0 {
-				dep.Subpackages = []string{sub}
-			}
-			buf = append(buf, dep)
-		}
-	}
-	return buf, nil
-}
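parseGbManifest above decodes vendor/manifest as JSON and reads the import path, repository and revision of each dependency. An illustrative entry of that shape; the lowercase JSON keys are assumed from the gb.Manifest fields the decoder uses, and the paths and revision are made up:

	{
	  "dependencies": [
	    {
	      "importpath": "github.com/example/lib/sub",
	      "repository": "https://github.com/example/lib",
	      "revision": "0123456789abcdef0123456789abcdef01234567"
	    }
	  ]
	}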
diff --git a/cmd/get_imports.go b/cmd/get_imports.go
deleted file mode 100644
index 8613914..0000000
--- a/cmd/get_imports.go
+++ /dev/null
@@ -1,882 +0,0 @@
-package cmd
-
-import (
-	"encoding/json"
-	"fmt"
-	"io/ioutil"
-	"net/http"
-	"net/url"
-	"os/exec"
-	"path/filepath"
-	"sort"
-	"sync"
-	//"log"
-
-	"os"
-	"runtime"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-	"github.com/Masterminds/glide/util"
-	"github.com/Masterminds/semver"
-	v "github.com/Masterminds/vcs"
-)
-
-// Used for the fan out/in pattern used with VCS calls.
-var concurrentWorkers = 20
-
-//func init() {
-// Uncomment the line below and the log import to see the output
-// from the vcs commands executed for each project.
-//v.Logger = log.New(os.Stdout, "go-vcs", log.LstdFlags)
-//}
-
-// GetAll gets zero or more repos.
-//
-// This takes a package name, normalizes it, finds the repo, and installs it.
-// It's the workhorse behind `glide get`.
-//
-// Params:
-//	- packages ([]string): Package names to get.
-// 	- verbose (bool): default false
-//
-// Returns:
-// 	- []*Dependency: A list of constructed dependencies.
-func GetAll(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	names := p.Get("packages", []string{}).([]string)
-	conf := p.Get("conf", nil).(*cfg.Config)
-	insecure := p.Get("insecure", false).(bool)
-
-	Info("Preparing to install %d packages.", len(names))
-
-	deps := []*cfg.Dependency{}
-	for _, name := range names {
-		var version string
-		parts := strings.Split(name, "#")
-		if len(parts) > 1 {
-			name = parts[0]
-			version = parts[1]
-		}
-
-		root := util.GetRootFromPackage(name)
-		if len(root) == 0 {
-			return nil, fmt.Errorf("Package name is required for %q.", name)
-		}
-
-		if conf.HasDependency(root) {
-			Warn("Package %q is already in glide.yaml. Skipping", root)
-			continue
-		}
-
-		if conf.HasIgnore(root) {
-			Warn("Package %q is set to be ignored in glide.yaml. Skipping", root)
-			continue
-		}
-
-		dep := &cfg.Dependency{
-			Name: root,
-		}
-
-		if version != "" {
-			dep.Reference = version
-		}
-
-		// When retrieving from an insecure location set the repo to the
-		// insecure location.
-		if insecure {
-			dep.Repository = "http://" + root
-		}
-
-		subpkg := strings.TrimPrefix(name, root)
-		if len(subpkg) > 0 && subpkg != "/" {
-			dep.Subpackages = []string{subpkg}
-		}
-
-		if dep.Reference != "" {
-			Info("Importing %s with the version %s", dep.Name, dep.Reference)
-		} else {
-			Info("Importing %s", dep.Name)
-		}
-
-		conf.Imports = append(conf.Imports, dep)
-
-		deps = append(deps, dep)
-
-	}
-
-	Info("Walking the dependency tree to calculate versions")
-	return deps, nil
-}
-
-// UpdateImports iterates over the imported packages and updates them.
-//
-// Params:
-//
-// 	- force (bool): force packages to update (default false)
-//	- conf (*cfg.Config): The configuration
-// 	- packages([]string): The packages to update. Default is all.
-func UpdateImports(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	cfg := p.Get("conf", nil).(*cfg.Config)
-	force := p.Get("force", true).(bool)
-	plist := p.Get("packages", []string{}).([]string)
-	home := p.Get("home", "").(string)
-	cache := p.Get("cache", false).(bool)
-	cacheGopath := p.Get("cacheGopath", false).(bool)
-	useGopath := p.Get("useGopath", false).(bool)
-
-	pkgs := list2map(plist)
-	restrict := len(pkgs) > 0
-
-	cwd, err := VendorPath(c)
-	if err != nil {
-		return false, err
-	}
-
-	if len(cfg.Imports) == 0 {
-		Info("No dependencies found. Nothing updated.\n")
-		return false, nil
-	}
-
-	for _, dep := range cfg.Imports {
-		if restrict && !pkgs[dep.Name] {
-			Debug("===> Skipping %q", dep.Name)
-
-			// Even though we are skipping the update, check that the package
-			// exists and has the VCS info needed for other operations.
-			dest := filepath.Join(cwd, filepath.FromSlash(dep.Name))
-			if _, err := os.Stat(dest); os.IsNotExist(err) {
-				Warn("Package %s not checked out to vendor/ folder", dep.Name)
-				Error("Unable to generate accurate glide.lock because %s is missing", dep.Name)
-			} else {
-				empty, err := isDirectoryEmpty(dest)
-				_, err2 := v.DetectVcsFromFS(dest)
-				if err != nil || empty == true {
-					Warn("Package %s not checked out to vendor/ folder. Directory empty", dep.Name)
-					Error("Unable to generate accurate glide.lock because %s is missing", dep.Name)
-					continue
-				} else if empty == false && err2 == v.ErrCannotDetectVCS {
-					Warn("%s appears to be a vendored package missing version control data", dep.Name)
-					Error("Unable to generate accurate glide.lock because %s version control data is missing", dep.Name)
-				}
-			}
-
-			continue
-		}
-
-		// Hack: The updateCache global keeps us from re-updating the same
-		// dependencies when we're recursing. We cache here to prevent
-		// flattening from causing unnecessary updates.
-		updateCache[dep.Name] = true
-
-		if err := VcsUpdate(dep, cwd, home, force, cache, cacheGopath, useGopath); err != nil {
-			Warn("Update failed for %s: %s\n", dep.Name, err)
-		}
-	}
-
-	return true, nil
-}
-
-// SetReference is a command to set the VCS reference (commit id, tag, etc) for
-// a project.
-func SetReference(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	conf := p.Get("conf", nil).(*cfg.Config)
-	cwd, err := VendorPath(c)
-	if err != nil {
-		return false, err
-	}
-
-	if len(conf.Imports) == 0 {
-		Info("No references set.\n")
-		return false, nil
-	}
-	//
-	// for _, dep := range conf.Imports {
-	// 	if err := VcsVersion(dep, cwd); err != nil {
-	// 		Warn("Failed to set version on %s to %s: %s\n", dep.Name, dep.Reference, err)
-	// 	}
-	// }
-
-	done := make(chan struct{}, concurrentWorkers)
-	in := make(chan *cfg.Dependency, concurrentWorkers)
-	var wg sync.WaitGroup
-
-	for i := 0; i < concurrentWorkers; i++ {
-		go func(ch <-chan *cfg.Dependency) {
-			for {
-				select {
-				case dep := <-ch:
-					if err := VcsVersion(dep, cwd); err != nil {
-						Warn("Failed to set version on %s to %s: %s\n", dep.Name, dep.Reference, err)
-					}
-					wg.Done()
-				case <-done:
-					return
-				}
-			}
-		}(in)
-	}
-
-	for _, dep := range conf.Imports {
-		wg.Add(1)
-		in <- dep
-	}
-
-	wg.Wait()
-	// Close goroutines setting the version
-	for i := 0; i < concurrentWorkers; i++ {
-		done <- struct{}{}
-	}
-	// close(done)
-	// close(in)
-
-	return true, nil
-}
-
-// filterArchOs indicates a dependency should be filtered out because it is
-// the wrong GOOS or GOARCH.
-func filterArchOs(dep *cfg.Dependency) bool {
-	found := false
-	if len(dep.Arch) > 0 {
-		for _, a := range dep.Arch {
-			if a == runtime.GOARCH {
-				found = true
-			}
-		}
-		// If it's not found, it should be filtered out.
-		if !found {
-			return true
-		}
-	}
-
-	found = false
-	if len(dep.Os) > 0 {
-		for _, o := range dep.Os {
-			if o == runtime.GOOS {
-				found = true
-			}
-		}
-		if !found {
-			return true
-		}
-
-	}
-
-	return false
-}
-
-// VcsExists checks if the directory has a local VCS checkout.
-func VcsExists(dep *cfg.Dependency, dest string) bool {
-	repo, err := dep.GetRepo(dest)
-	if err != nil {
-		return false
-	}
-
-	return repo.CheckLocal()
-}
-
-// VcsGet figures out how to fetch a dependency, and then gets it.
-//
-// VcsGet installs into the dest.
-func VcsGet(dep *cfg.Dependency, dest, home string, cache, cacheGopath, useGopath bool) error {
-	// When not skipping the $GOPATH look in it for a copy of the package
-	if useGopath {
-		// Check if the $GOPATH has a viable version to use and if so copy to vendor
-		gps := Gopaths()
-		for _, p := range gps {
-			d := filepath.Join(p, "src", filepath.FromSlash(dep.Name))
-			if _, err := os.Stat(d); err == nil {
-				empty, err := isDirectoryEmpty(d)
-				if empty || err != nil {
-					continue
-				}
-
-				repo, err := dep.GetRepo(d)
-				if err != nil {
-					continue
-				}
-
-				// Dirty repos have uncommitted changes.
-				if repo.IsDirty() {
-					continue
-				}
-
-				// Having found a repo we copy it to vendor and update it.
-				Debug("Found %s in GOPATH at %s. Copying to %s", dep.Name, d, dest)
-				err = copyDir(d, dest)
-				if err != nil {
-					return err
-				}
-
-				// Update the repo in the vendor directory
-				Debug("Updating %s, now in the vendor path at %s", dep.Name, dest)
-				repo, err = dep.GetRepo(dest)
-				if err != nil {
-					return err
-				}
-				err = repo.Update()
-				if err != nil {
-					return err
-				}
-
-				// If there is no reference set on the dep we try to checkout
-				// the default branch.
-				if dep.Reference == "" {
-					db := defaultBranch(repo, home)
-					if db != "" {
-						err = repo.UpdateVersion(db)
-						if err != nil {
-							Debug("Attempting to set the version on %s to %s failed. Error %s", dep.Name, db, err)
-						}
-					}
-				}
-				return nil
-			}
-		}
-	}
-
-	// When opting in to caching in the GOPATH, attempt to put a copy there.
-	if cacheGopath {
-
-		// Since we didn't find an existing copy in the GOPATHs try to clone there.
-		gp := Gopath()
-		if gp != "" {
-			d := filepath.Join(gp, "src", filepath.FromSlash(dep.Name))
-			if _, err := os.Stat(d); os.IsNotExist(err) {
-				// Empty directory, so we check out the code here.
-				Debug("Retrieving %s to %s before copying to vendor", dep.Name, d)
-				repo, err := dep.GetRepo(d)
-				if err != nil {
-					return err
-				}
-				repo.Get()
-
-				branch := findCurrentBranch(repo)
-				if branch != "" {
-					// we know the default branch so we can store it in the cache
-					var loc string
-					if dep.Repository != "" {
-						loc = dep.Repository
-					} else {
-						loc = "https://" + dep.Name
-					}
-					key, err := cacheCreateKey(loc)
-					if err == nil {
-						Debug("Saving default branch for %s", repo.Remote())
-						c := cacheRepoInfo{DefaultBranch: branch}
-						err = saveCacheRepoData(key, c, home)
-						if err == errCacheDisabled {
-							Debug("Unable to cache default branch because caching is disabled")
-						}
-					}
-				}
-
-				Debug("Copying %s from GOPATH at %s to %s", dep.Name, d, dest)
-				err = copyDir(d, dest)
-				if err != nil {
-					return err
-				}
-
-				return nil
-			}
-		}
-	}
-
-	// If opting in to caching attempt to put it in the cache folder
-	if cache {
-		// Check if the cache has a viable version and try to use that.
-		var loc string
-		if dep.Repository != "" {
-			loc = dep.Repository
-		} else {
-			loc = "https://" + dep.Name
-		}
-		key, err := cacheCreateKey(loc)
-		if err == nil {
-			d := filepath.Join(home, "cache", "src", key)
-
-			repo, err := dep.GetRepo(d)
-			if err != nil {
-				return err
-			}
-			// If the directory does not exist this is the first time it is cached.
-			if _, err = os.Stat(d); os.IsNotExist(err) {
-				Debug("Adding %s to the cache for the first time", dep.Name)
-				err = repo.Get()
-				if err != nil {
-					return err
-				}
-				branch := findCurrentBranch(repo)
-				if branch != "" {
-					// we know the default branch so we can store it in the cache
-					var loc string
-					if dep.Repository != "" {
-						loc = dep.Repository
-					} else {
-						loc = "https://" + dep.Name
-					}
-					key, err := cacheCreateKey(loc)
-					if err == nil {
-						Debug("Saving default branch for %s", repo.Remote())
-						c := cacheRepoInfo{DefaultBranch: branch}
-						err = saveCacheRepoData(key, c, home)
-						if err == errCacheDisabled {
-							Debug("Unable to cache default branch because caching is disabled")
-						} else if err != nil {
-							Debug("Error saving %s to cache. Error: %s", repo.Remote(), err)
-						}
-					}
-				}
-
-			} else {
-				Debug("Updating %s in the cache", dep.Name)
-				err = repo.Update()
-				if err != nil {
-					return err
-				}
-			}
-
-			Debug("Copying %s from the cache to %s", dep.Name, dest)
-			err = copyDir(d, dest)
-			if err != nil {
-				return err
-			}
-
-			return nil
-		} else {
-			Warn("Cache key generation error: %s", err)
-		}
-	}
-
-	// If unable to cache pull directly into the vendor/ directory.
-	repo, err := dep.GetRepo(dest)
-	if err != nil {
-		return err
-	}
-
-	gerr := repo.Get()
-
-	// Attempt to cache the default branch
-	branch := findCurrentBranch(repo)
-	if branch != "" {
-		// we know the default branch so we can store it in the cache
-		var loc string
-		if dep.Repository != "" {
-			loc = dep.Repository
-		} else {
-			loc = "https://" + dep.Name
-		}
-		key, err := cacheCreateKey(loc)
-		if err == nil {
-			Debug("Saving default branch for %s", repo.Remote())
-			c := cacheRepoInfo{DefaultBranch: branch}
-			err = saveCacheRepoData(key, c, home)
-			if err == errCacheDisabled {
-				Debug("Unable to cache default branch because caching is disabled")
-			} else if err != nil {
-				Debug("Error saving %s to cache - Error: %s", repo.Remote(), err)
-			}
-		}
-	}
-
-	return gerr
-}
-
-// VcsUpdate updates to a particular checkout based on the VCS setting.
-func VcsUpdate(dep *cfg.Dependency, vend, home string, force, cache, cacheGopath, useGopath bool) error {
-	Info("Fetching updates for %s.\n", dep.Name)
-
-	if filterArchOs(dep) {
-		Info("%s is not used for %s/%s.\n", dep.Name, runtime.GOOS, runtime.GOARCH)
-		return nil
-	}
-
-	dest := filepath.Join(vend, filepath.FromSlash(dep.Name))
-	// If destination doesn't exist we need to perform an initial checkout.
-	if _, err := os.Stat(dest); os.IsNotExist(err) {
-		if err = VcsGet(dep, dest, home, cache, cacheGopath, useGopath); err != nil {
-			Warn("Unable to checkout %s\n", dep.Name)
-			return err
-		}
-	} else {
-		// At this point we have a directory for the package.
-
-		// When the directory is not empty and has no VCS directory it's
-		// a vendored files situation.
-		empty, err := isDirectoryEmpty(dest)
-		if err != nil {
-			return err
-		}
-		_, err = v.DetectVcsFromFS(dest)
-		if updatingVendored == false && empty == false && err == v.ErrCannotDetectVCS {
-			Warn("%s appears to be a vendored package. Unable to update. Consider the '--update-vendored' flag.\n", dep.Name)
-		} else {
-
-			if updatingVendored == true && empty == false && err == v.ErrCannotDetectVCS {
-				// A vendored package, no repo, and updating the vendored packages
-				// has been opted into.
-				Info("%s is a vendored package. Updating.", dep.Name)
-				err = os.RemoveAll(dest)
-				if err != nil {
-					Error("Unable to update vendored dependency %s.\n", dep.Name)
-					return err
-				} else {
-					dep.UpdateAsVendored = true
-				}
-
-				if err = VcsGet(dep, dest, home, cache, cacheGopath, useGopath); err != nil {
-					Warn("Unable to checkout %s\n", dep.Name)
-					return err
-				}
-
-				return nil
-			}
-
-			repo, err := dep.GetRepo(dest)
-
-			// Tried to checkout a repo to a path that does not work. Either the
-			// type or endpoint has changed. Force is being passed in so the old
-			// location can be removed and replaced with the new one.
-			// Warning, any changes in the old location will be deleted.
-			// TODO: Put dirty checking in on the existing local checkout.
-			if (err == v.ErrWrongVCS || err == v.ErrWrongRemote) && force == true {
-				var newRemote string
-				if len(dep.Repository) > 0 {
-					newRemote = dep.Repository
-				} else {
-					newRemote = "https://" + dep.Name
-				}
-
-				Warn("Replacing %s with contents from %s\n", dep.Name, newRemote)
-				rerr := os.RemoveAll(dest)
-				if rerr != nil {
-					return rerr
-				}
-				if err = VcsGet(dep, dest, home, cache, cacheGopath, useGopath); err != nil {
-					Warn("Unable to checkout %s\n", dep.Name)
-					return err
-				}
-			} else if err != nil {
-				return err
-			} else if repo.IsDirty() {
-				return fmt.Errorf("%s contains uncommitted changes. Skipping update", dep.Name)
-			} else {
-
-				// Check if the current version is a tag or commit id. If it is
-				// and that version is already checked out we can skip updating
-				// which is faster than going out to the Internet to perform
-				// an update.
-				if dep.Reference != "" {
-					version, err := repo.Version()
-					if err != nil {
-						return err
-					}
-					ib, err := isBranch(dep.Reference, repo)
-					if err != nil {
-						return err
-					}
-
-					// If the current version equals the ref and it's not a
-					// branch it's a tag or commit id so we can skip
-					// performing an update.
-					if version == dep.Reference && !ib {
-						Info("%s is already set to version %s. Skipping update.", dep.Name, dep.Reference)
-						return nil
-					}
-				}
-
-				if err := repo.Update(); err != nil {
-					Warn("Download failed.\n")
-					return err
-				}
-			}
-		}
-	}
-
-	return nil
-}
-
-// VcsVersion sets the VCS version for a checkout.
-func VcsVersion(dep *cfg.Dependency, vend string) error {
-	cwd := filepath.Join(vend, filepath.FromSlash(dep.Name))
-
-	// If there is no reference configured there is nothing to set.
-	if dep.Reference == "" {
-		// Before exiting update the pinned version
-		repo, err := dep.GetRepo(cwd)
-		if err != nil {
-			return err
-		}
-		dep.Pin, err = repo.Version()
-		if err != nil {
-			return err
-		}
-		return nil
-	}
-
-	// When the directory is not empty and has no VCS directory it's
-	// a vendored files situation.
-	empty, err := isDirectoryEmpty(cwd)
-	if err != nil {
-		return err
-	}
-	_, err = v.DetectVcsFromFS(cwd)
-	if empty == false && err == v.ErrCannotDetectVCS {
-		Warn("%s appears to be a vendored package. Unable to set new version. Consider the '--update-vendored' flag.\n", dep.Name)
-	} else {
-		repo, err := dep.GetRepo(cwd)
-		if err != nil {
-			return err
-		}
-
-		ver := dep.Reference
-		// References in Git can begin with a ^ which is similar to semver.
-		// If there is a ^ prefix we assume it's a semver constraint rather than
-		// part of the git/VCS commit id.
-		if repo.IsReference(ver) && !strings.HasPrefix(ver, "^") {
-			Info("Setting version for %s to %s.\n", dep.Name, ver)
-		} else {
-
-			// Create the constraint first to make sure it's valid before
-			// working on the repo.
-			constraint, err := semver.NewConstraint(ver)
-
-			// Make sure the constraint is valid. At this point it's not a valid
-			// reference so if it's not a valid constraint we can exit early.
-			if err != nil {
-				Warn("The reference '%s' is not valid\n", ver)
-				return err
-			}
-
-			// Get the tags and branches (in that order)
-			refs, err := getAllVcsRefs(repo)
-			if err != nil {
-				return err
-			}
-
-			// Convert and filter the list to semver.Version instances
-			semvers := getSemVers(refs)
-
-			// Sort semver list
-			sort.Sort(sort.Reverse(semver.Collection(semvers)))
-			found := false
-			for _, v := range semvers {
-				if constraint.Check(v) {
-					found = true
-					// If the constraint passes get the original reference
-					ver = v.Original()
-					break
-				}
-			}
-			if found {
-				Info("Detected semantic version. Setting version for %s to %s.\n", dep.Name, ver)
-			} else {
-				Warn("Unable to find semantic version for constraint %s %s\n", dep.Name, ver)
-			}
-		}
-		if err := repo.UpdateVersion(ver); err != nil {
-			Error("Failed to set version to %s: %s\n", dep.Reference, err)
-			return err
-		}
-		dep.Pin, err = repo.Version()
-		if err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
-
-// VcsLastCommit gets the last commit ID from the given dependency.
-func VcsLastCommit(dep *cfg.Dependency, vend string) (string, error) {
-	cwd := filepath.Join(vend, filepath.FromSlash(dep.Name))
-	repo, err := dep.GetRepo(cwd)
-	if err != nil {
-		return "", err
-	}
-
-	if repo.CheckLocal() == false {
-		return "", fmt.Errorf("%s is not a VCS repo\n", dep.Name)
-	}
-
-	version, err := repo.Version()
-	if err != nil {
-		return "", err
-	}
-
-	return version, nil
-}
-
-// Some repos will have multiple branches in them (e.g. Git) while others
-// (e.g. Svn) will not.
-func defaultBranch(repo v.Repo, home string) string {
-
-	// Svn and Bzr use different locations (paths or entire locations)
-	// for branches so we won't have a default branch.
-	if repo.Vcs() == v.Svn || repo.Vcs() == v.Bzr {
-		return ""
-	}
-
-	// Check the cache for a value.
-	key, kerr := cacheCreateKey(repo.Remote())
-	var d cacheRepoInfo
-	if kerr == nil {
-		d, err := cacheRepoData(key, home)
-		if err == nil {
-			if d.DefaultBranch != "" {
-				return d.DefaultBranch
-			}
-		}
-	}
-
-	// If we don't have it in the store try some APIs
-	r := repo.Remote()
-	u, err := url.Parse(r)
-	if err != nil {
-		return ""
-	}
-	if u.Scheme == "" {
-		// Where there is no scheme we try URLs like git@github.com:foo/bar
-		r = strings.Replace(r, ":", "/", -1)
-		r = "ssh://" + r
-		u, err = url.Parse(r)
-		if err != nil {
-			return ""
-		}
-		u.Scheme = ""
-	}
-	if u.Host == "github.com" {
-		// u.Path carries a leading slash (e.g. "/owner/repo"), so trim it before splitting.
-		parts := strings.Split(strings.Trim(u.Path, "/"), "/")
-		if len(parts) != 2 {
-			return ""
-		}
-		api := fmt.Sprintf("https://api.github.com/repos/%s/%s", parts[0], parts[1])
-		resp, err := http.Get(api)
-		if err != nil {
-			return ""
-		}
-		defer resp.Body.Close()
-		if resp.StatusCode >= 300 || resp.StatusCode < 200 {
-			return ""
-		}
-		body, err := ioutil.ReadAll(resp.Body)
-		var data interface{}
-		err = json.Unmarshal(body, &data)
-		if err != nil {
-			return ""
-		}
-		gh := data.(map[string]interface{})
-		db := gh["default_branch"].(string)
-		if kerr == nil {
-			d.DefaultBranch = db
-			err := saveCacheRepoData(key, d, home)
-			if err == errCacheDisabled {
-				Debug("Unable to cache default branch because caching is disabled")
-			} else if err != nil {
-				Debug("Error saving %s to cache. Error: %s", repo.Remote(), err)
-			}
-		}
-		return db
-	}
-
-	if u.Host == "bitbucket.org" {
-		// u.Path carries a leading slash (e.g. "/owner/repo"), so trim it before splitting.
-		parts := strings.Split(strings.Trim(u.Path, "/"), "/")
-		if len(parts) != 2 {
-			return ""
-		}
-		api := fmt.Sprintf("https://bitbucket.org/api/1.0/repositories/%s/%s/main-branch/", parts[0], parts[1])
-		resp, err := http.Get(api)
-		if err != nil {
-			return ""
-		}
-		defer resp.Body.Close()
-		if resp.StatusCode >= 300 || resp.StatusCode < 200 {
-			return ""
-		}
-		body, err := ioutil.ReadAll(resp.Body)
-		var data interface{}
-		err = json.Unmarshal(body, &data)
-		if err != nil {
-			return ""
-		}
-		bb := data.(map[string]interface{})
-		db := bb["name"].(string)
-		if kerr == nil {
-			d.DefaultBranch = db
-			err := saveCacheRepoData(key, d, home)
-			if err == errCacheDisabled {
-				Debug("Unable to cache default branch because caching is disabled")
-			} else if err != nil {
-				Debug("Error saving %s to cache. Error: %s", repo.Remote(), err)
-			}
-		}
-		return db
-	}
-
-	return ""
-}
-
-// From a local repo find out the current branch name if there is one.
-func findCurrentBranch(repo v.Repo) string {
-	Debug("Attempting to find current branch for %s", repo.Remote())
-	// Svn and Bzr don't have default branches.
-	if repo.Vcs() == v.Svn || repo.Vcs() == v.Bzr {
-		return ""
-	}
-
-	if repo.Vcs() == v.Git {
-		c := exec.Command("git", "symbolic-ref", "--short", "HEAD")
-		c.Dir = repo.LocalPath()
-		c.Env = envForDir(c.Dir)
-		out, err := c.CombinedOutput()
-		if err != nil {
-			Debug("Unable to find current branch for %s, error: %s", repo.Remote(), err)
-			return ""
-		}
-		return strings.TrimSpace(string(out))
-	}
-
-	if repo.Vcs() == v.Hg {
-		c := exec.Command("hg", "branch")
-		c.Dir = repo.LocalPath()
-		c.Env = envForDir(c.Dir)
-		out, err := c.CombinedOutput()
-		if err != nil {
-			Debug("Unable to find current branch for %s, error: %s", repo.Remote(), err)
-			return ""
-		}
-		return strings.TrimSpace(string(out))
-	}
-
-	return ""
-}
-
-// list2map takes a list of package names and creates a map of normalized names.
-func list2map(in []string) map[string]bool {
-	out := make(map[string]bool, len(in))
-	for _, v := range in {
-		v, _ := NormalizeName(v)
-		out[v] = true
-	}
-	return out
-}
-
-func envForDir(dir string) []string {
-	env := os.Environ()
-	return mergeEnvLists([]string{"PWD=" + dir}, env)
-}
-
-func mergeEnvLists(in, out []string) []string {
-NextVar:
-	for _, inkv := range in {
-		k := strings.SplitAfterN(inkv, "=", 2)[0]
-		for i, outkv := range out {
-			if strings.HasPrefix(outkv, k) {
-				out[i] = inkv
-				continue NextVar
-			}
-		}
-		out = append(out, inkv)
-	}
-	return out
-}
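For context on GetAll above: each requested package is split on "#", the part before it is the package path and the part after it becomes the reference recorded in glide.yaml. A hedged usage example with a made-up package:

	glide get github.com/example/lib#v1.2.3

which adds github.com/example/lib to glide.yaml with v1.2.3 as its reference.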
diff --git a/cmd/get_imports_test.go b/cmd/get_imports_test.go
deleted file mode 100644
index b072ed4..0000000
--- a/cmd/get_imports_test.go
+++ /dev/null
@@ -1,8 +0,0 @@
-package cmd
-
-import "github.com/Masterminds/cookoo"
-
-func SilenceLogs(c cookoo.Context) {
-	p := cookoo.NewParamsWithValues(map[string]interface{}{"quiet": true})
-	BeQuiet(c, p)
-}
diff --git a/cmd/gpm.go b/cmd/gpm.go
deleted file mode 100644
index eef097f..0000000
--- a/cmd/gpm.go
+++ /dev/null
@@ -1,110 +0,0 @@
-package cmd
-
-import (
-	"bufio"
-	"os"
-	"path/filepath"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-)
-
-// This file contains commands for working with GPM/GVP.
-
-// HasGPMGodeps indicates whether a Godeps file exists.
-func HasGPMGodeps(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	dir := cookoo.GetString("dir", "", p)
-	path := filepath.Join(dir, "Godeps")
-	_, err := os.Stat(path)
-	return err == nil, nil
-}
-
-// GPMGodeps parses a GPM-flavored Godeps file.
-//
-// Params
-// 	- dir (string): Directory root.
-//
-// Returns an []*cfg.Dependency
-func GPMGodeps(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	dir := cookoo.GetString("dir", "", p)
-	return parseGPMGodeps(dir)
-}
-func parseGPMGodeps(dir string) ([]*cfg.Dependency, error) {
-	path := filepath.Join(dir, "Godeps")
-	if i, err := os.Stat(path); err != nil {
-		return []*cfg.Dependency{}, nil
-	} else if i.IsDir() {
-		Info("Godeps is a directory. This is probably a Godep project.\n")
-		return []*cfg.Dependency{}, nil
-	}
-	Info("Found Godeps file.\n")
-
-	buf := []*cfg.Dependency{}
-
-	file, err := os.Open(path)
-	if err != nil {
-		return buf, err
-	}
-	scanner := bufio.NewScanner(file)
-	for scanner.Scan() {
-		parts, ok := parseGodepsLine(scanner.Text())
-		if ok {
-			dep := &cfg.Dependency{Name: parts[0]}
-			if len(parts) > 1 {
-				dep.Reference = parts[1]
-			}
-			buf = append(buf, dep)
-		}
-	}
-	if err := scanner.Err(); err != nil {
-		Warn("Scan failed: %s\n", err)
-		return buf, err
-	}
-
-	return buf, nil
-}
-
-// GPMGodepsGit reads a Godeps-Git file for gpm-git.
-func GPMGodepsGit(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	dir := cookoo.GetString("dir", "", p)
-	path := filepath.Join(dir, "Godeps-Git")
-	if _, err := os.Stat(path); err != nil {
-		return []*cfg.Dependency{}, nil
-	}
-	Info("Found Godeps-Git file.\n")
-
-	buf := []*cfg.Dependency{}
-
-	file, err := os.Open(path)
-	if err != nil {
-		return buf, err
-	}
-	scanner := bufio.NewScanner(file)
-	for scanner.Scan() {
-		parts, ok := parseGodepsLine(scanner.Text())
-		if ok {
-			dep := &cfg.Dependency{Name: parts[1], Repository: parts[0]}
-			if len(parts) > 2 {
-				dep.Reference = parts[2]
-			}
-			buf = append(buf, dep)
-		}
-	}
-	if err := scanner.Err(); err != nil {
-		Warn("Scan failed: %s\n", err)
-		return buf, err
-	}
-
-	return buf, nil
-}
-
-func parseGodepsLine(line string) ([]string, bool) {
-	line = strings.TrimSpace(line)
-
-	if len(line) == 0 || strings.HasPrefix(line, "#") {
-		return []string{}, false
-	}
-
-	return strings.Fields(line), true
-}
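parseGPMGodeps and parseGodepsLine above read a GPM Godeps file as whitespace-separated fields, one dependency per line, skipping blank lines and "#" comments; the first field becomes the dependency name and the optional second field its reference. An illustrative file in that format (names and refs are made up):

	# comment lines and blank lines are ignored
	github.com/example/foo v1.2.3
	github.com/example/bar 0123456789abcdef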
diff --git a/cmd/guess_deps.go b/cmd/guess_deps.go
deleted file mode 100644
index 1ab3ebc..0000000
--- a/cmd/guess_deps.go
+++ /dev/null
@@ -1,169 +0,0 @@
-package cmd
-
-import (
-	"os"
-	"path/filepath"
-	"sort"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-	"github.com/Masterminds/glide/dependency"
-	"github.com/Masterminds/glide/util"
-)
-
-// GuessDeps tries to get the dependencies for the current directory.
-//
-// Params
-//  - dirname (string): Directory to use as the base. Default: "."
-//  - skipImport (bool): Whether to skip importing from Godep, GPM, and gb
-func GuessDeps(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	buildContext, err := util.GetBuildContext()
-	if err != nil {
-		return nil, err
-	}
-	base := p.Get("dirname", ".").(string)
-	skipImport := p.Get("skipImport", false).(bool)
-	name := guessPackageName(buildContext, base)
-
-	Info("Generating a YAML configuration file and guessing the dependencies")
-
-	config := new(cfg.Config)
-
-	// Get the name of the top level package
-	config.Name = name
-
-	// Import by looking at other package managers and looking over the
-	// entire directory structure.
-
-	// Attempt to import from other package managers.
-	if !skipImport {
-		Info("Attempting to import from other package managers (use --skip-import to skip)")
-		deps := []*cfg.Dependency{}
-		absBase, err := filepath.Abs(base)
-		if err != nil {
-			return nil, err
-		}
-
-		if d, ok := guessImportGodep(absBase); ok {
-			Info("Importing Godep configuration")
-			Warn("Godep uses commit id versions. Consider using Semantic Versions with Glide")
-			deps = d
-		} else if d, ok := guessImportGPM(absBase); ok {
-			Info("Importing GPM configuration")
-			deps = d
-		} else if d, ok := guessImportGB(absBase); ok {
-			Info("Importing GB configuration")
-			deps = d
-		}
-
-		for _, i := range deps {
-			Info("Found imported reference to %s\n", i.Name)
-			config.Imports = append(config.Imports, i)
-		}
-	}
-
-	// Resolve dependencies by looking at the tree.
-	r, err := dependency.NewResolver(base)
-	if err != nil {
-		return nil, err
-	}
-
-	h := &dependency.DefaultMissingPackageHandler{Missing: []string{}, Gopath: []string{}}
-	r.Handler = h
-
-	sortable, err := r.ResolveLocal(false)
-	if err != nil {
-		return nil, err
-	}
-
-	sort.Strings(sortable)
-
-	vpath := r.VendorDir
-	if !strings.HasSuffix(vpath, "/") {
-		vpath = vpath + string(os.PathSeparator)
-	}
-
-	for _, pa := range sortable {
-		n := strings.TrimPrefix(pa, vpath)
-		root := util.GetRootFromPackage(n)
-
-		if !config.HasDependency(root) {
-			Info("Found reference to %s\n", n)
-			d := &cfg.Dependency{
-				Name: root,
-			}
-			subpkg := strings.TrimPrefix(n, root)
-			if len(subpkg) > 0 && subpkg != "/" {
-				d.Subpackages = []string{subpkg}
-			}
-			config.Imports = append(config.Imports, d)
-		} else {
-			subpkg := strings.TrimPrefix(n, root)
-			if len(subpkg) > 0 && subpkg != "/" {
-				subpkg = strings.TrimPrefix(subpkg, "/")
-				d := config.Imports.Get(root)
-				f := false
-				for _, v := range d.Subpackages {
-					if v == subpkg {
-						f = true
-					}
-				}
-				if !f {
-					Info("Adding sub-package %s to %s\n", subpkg, root)
-					d.Subpackages = append(d.Subpackages, subpkg)
-				}
-			}
-		}
-	}
-
-	return config, nil
-}
-
-// Attempt to guess the package name at the top level. When unable to detect
-// a name it defaults to "main".
-func guessPackageName(b *util.BuildCtxt, base string) string {
-	cwd, err := os.Getwd()
-	if err != nil {
-		return "main"
-	}
-
-	pkg, err := b.Import(base, cwd, 0)
-	if err != nil {
-		// There may not be any top level Go source files but the project may
-		// still be within the GOPATH.
-		if strings.HasPrefix(base, b.GOPATH) {
-			p := strings.TrimPrefix(base, b.GOPATH)
-			return strings.Trim(p, string(os.PathSeparator))
-		}
-	}
-
-	return pkg.ImportPath
-}
-
-func guessImportGodep(dir string) ([]*cfg.Dependency, bool) {
-	d, err := parseGodepGodeps(dir)
-	if err != nil || len(d) == 0 {
-		return []*cfg.Dependency{}, false
-	}
-
-	return d, true
-}
-
-func guessImportGPM(dir string) ([]*cfg.Dependency, bool) {
-	d, err := parseGPMGodeps(dir)
-	if err != nil || len(d) == 0 {
-		return []*cfg.Dependency{}, false
-	}
-
-	return d, true
-}
-
-func guessImportGB(dir string) ([]*cfg.Dependency, bool) {
-	d, err := parseGbManifest(dir)
-	if err != nil || len(d) == 0 {
-		return []*cfg.Dependency{}, false
-	}
-
-	return d, true
-}
diff --git a/cmd/install.go b/cmd/install.go
deleted file mode 100644
index ea4b86a..0000000
--- a/cmd/install.go
+++ /dev/null
@@ -1,142 +0,0 @@
-package cmd
-
-import (
-	"io/ioutil"
-	"os"
-	"sync"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-)
-
-// LockFileExists checks if a lock file exists. If not it jumps to the update
-// command.
-func LockFileExists(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	fname := p.Get("filename", "glide.lock").(string)
-	if _, err := os.Stat(fname); err != nil {
-		Info("Lock file (glide.lock) does not exist. Performing update.")
-		return false, &cookoo.Reroute{"update"}
-	}
-
-	return true, nil
-}
-
-// LoadLockFile loads the lock file to the context and checks if it is correct
-// for the loaded cfg file.
-func LoadLockFile(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	fname := p.Get("filename", "glide.lock").(string)
-	conf := p.Get("conf", nil).(*cfg.Config)
-
-	yml, err := ioutil.ReadFile(fname)
-	if err != nil {
-		return nil, err
-	}
-	lock, err := cfg.LockfileFromYaml(yml)
-	if err != nil {
-		return nil, err
-	}
-
-	hash, err := conf.Hash()
-	if err != nil {
-		return nil, err
-	}
-
-	if hash != lock.Hash {
-		Warn("Lock file may be out of date. Hash check of YAML failed. You may need to run 'update'")
-	}
-
-	return lock, nil
-}
-
-// Install installs the dependencies from a Lockfile.
-func Install(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	lock := p.Get("lock", nil).(*cfg.Lockfile)
-	conf := p.Get("conf", nil).(*cfg.Config)
-	force := p.Get("force", true).(bool)
-	home := p.Get("home", "").(string)
-	cache := p.Get("cache", false).(bool)
-	cacheGopath := p.Get("cacheGopath", false).(bool)
-	useGopath := p.Get("useGopath", false).(bool)
-
-	cwd, err := VendorPath(c)
-	if err != nil {
-		return false, err
-	}
-
-	// Create a config setup based on the Lockfile data to process with
-	// existing commands.
-	newConf := &cfg.Config{}
-	newConf.Name = conf.Name
-
-	newConf.Imports = make(cfg.Dependencies, len(lock.Imports))
-	for k, v := range lock.Imports {
-		newConf.Imports[k] = &cfg.Dependency{
-			Name:        v.Name,
-			Reference:   v.Version,
-			Repository:  v.Repository,
-			VcsType:     v.VcsType,
-			Subpackages: v.Subpackages,
-			Arch:        v.Arch,
-			Os:          v.Os,
-		}
-	}
-
-	newConf.DevImports = make(cfg.Dependencies, len(lock.DevImports))
-	for k, v := range lock.DevImports {
-		newConf.DevImports[k] = &cfg.Dependency{
-			Name:        v.Name,
-			Reference:   v.Version,
-			Repository:  v.Repository,
-			VcsType:     v.VcsType,
-			Subpackages: v.Subpackages,
-			Arch:        v.Arch,
-			Os:          v.Os,
-		}
-	}
-
-	newConf.DeDupe()
-
-	if len(newConf.Imports) == 0 {
-		Info("No dependencies found. Nothing installed.\n")
-		return false, nil
-	}
-
-	// for _, dep := range newConf.Imports {
-	// 	if err := VcsUpdate(dep, cwd, home, force, cache, cacheGopath, useGopath); err != nil {
-	// 		Warn("Update failed for %s: %s\n", dep.Name, err)
-	// 	}
-	// }
-
-	done := make(chan struct{}, concurrentWorkers)
-	in := make(chan *cfg.Dependency, concurrentWorkers)
-	var wg sync.WaitGroup
-
-	for i := 0; i < concurrentWorkers; i++ {
-		go func(ch <-chan *cfg.Dependency) {
-			for {
-				select {
-				case dep := <-ch:
-					if err := VcsUpdate(dep, cwd, home, force, cache, cacheGopath, useGopath); err != nil {
-						Warn("Update failed for %s: %s\n", dep.Name, err)
-					}
-					wg.Done()
-				case <-done:
-					return
-				}
-			}
-		}(in)
-	}
-
-	for _, dep := range newConf.Imports {
-		wg.Add(1)
-		in <- dep
-	}
-
-	wg.Wait()
-	// Close goroutines setting the version
-	for i := 0; i < concurrentWorkers; i++ {
-		done <- struct{}{}
-	}
-
-	return newConf, nil
-}
diff --git a/cmd/link_package.go b/cmd/link_package.go
deleted file mode 100644
index 214b094..0000000
--- a/cmd/link_package.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package cmd
-
-import (
-	"fmt"
-	"os"
-	"path"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-)
-
-// LinkPackage creates a symlink to the project within the GOPATH.
-func LinkPackage(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	conf := c.Get("cfg", "").(*cfg.Config)
-	pname := p.Get("path", conf.Name).(string)
-
-	// Per issue #10, this may be nicer to work with in cases where repos are
-	// moved.
-	//here := "../.."
-	depth := strings.Count(pname, "/")
-	here := "../.." + strings.Repeat("/..", depth)
-
-	gopath := Gopath()
-	if gopath == "" {
-		return nil, fmt.Errorf("$GOPATH appears to be unset")
-	}
-	if len(pname) == 0 {
-		return nil, fmt.Errorf("glide.yaml is missing 'package:'")
-	}
-
-	base := path.Dir(pname)
-	if base != "." {
-		dir := fmt.Sprintf("%s/src/%s", gopath, base)
-		if err := os.MkdirAll(dir, os.ModeDir|0755); err != nil {
-			return nil, fmt.Errorf("Failed to make directory %s: %s", dir, err)
-		}
-	}
-
-	ldest := fmt.Sprintf("%s/src/%s", gopath, pname)
-	if err := os.Symlink(here, ldest); err != nil {
-		if os.IsExist(err) {
-			Info("Link to %s already exists. Skipping.\n", ldest)
-		} else {
-			return nil, fmt.Errorf("Failed to create symlink from %s to %s: %s", gopath, ldest, err)
-		}
-	}
-
-	return ldest, nil
-}
diff --git a/cmd/mkdir.go b/cmd/mkdir.go
deleted file mode 100644
index d478845..0000000
--- a/cmd/mkdir.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package cmd
-
-import (
-	"fmt"
-	"os"
-
-	"github.com/Masterminds/cookoo"
-)
-
-// Mkdir creates the src directory within the GOPATH.
-func Mkdir(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-
-	target := p.Get("dir", "").(string)
-	if len(target) == 0 {
-		return nil, fmt.Errorf("Vendor path appears to be unset")
-	}
-
-	if err := os.MkdirAll(target, os.ModeDir|0755); err != nil {
-		return false, fmt.Errorf("Failed to make directory %s: %s", target, err)
-	}
-
-	return true, nil
-}
diff --git a/cmd/msg.go b/cmd/msg.go
deleted file mode 100644
index a6dcc14..0000000
--- a/cmd/msg.go
+++ /dev/null
@@ -1,100 +0,0 @@
-// +build !windows
-
-package cmd
-
-import (
-	"fmt"
-	"os"
-	"strings"
-	"sync"
-)
-
-// These constants map to color codes for shell scripts, making them
-// human readable.
-const (
-	Blue   = "0;34"
-	Red    = "0;31"
-	Green  = "0;32"
-	Yellow = "0;33"
-	Cyan   = "0;36"
-	Pink   = "1;35"
-)
-
-var outputLock sync.Mutex
-
-// Color returns a string in a certain color. The first argument is a string
-// containing the color code or a constant from the table above mapped to a code.
-//
-// The following will print the string "Foo" in yellow:
-//     fmt.Print(Color(Yellow, "Foo"))
-func Color(code, msg string) string {
-	if NoColor {
-		return msg
-	}
-	return fmt.Sprintf("\033[%sm%s\033[m", code, msg)
-}
-
-// Info logs information
-func Info(msg string, args ...interface{}) {
-	if Quiet {
-		return
-	}
-	i := fmt.Sprint(Color(Green, "[INFO] "))
-	Msg(i+msg, args...)
-}
-
-// Debug logs debug information
-func Debug(msg string, args ...interface{}) {
-	if Quiet || !IsDebugging {
-		return
-	}
-	i := fmt.Sprint("[DEBUG] ")
-	Msg(i+msg, args...)
-}
-
-// Warn logs a warning
-func Warn(msg string, args ...interface{}) {
-	i := fmt.Sprint(Color(Yellow, "[WARN] "))
-	ErrMsg(i+msg, args...)
-}
-
-// Error logs an error.
-func Error(msg string, args ...interface{}) {
-	i := fmt.Sprint(Color(Red, "[ERROR] "))
-	ErrMsg(i+msg, args...)
-}
-
-// ErrMsg sends a message to Stderr
-func ErrMsg(msg string, args ...interface{}) {
-	outputLock.Lock()
-	defer outputLock.Unlock()
-
-	// If messages don't have a newline on the end we add one.
-	e := ""
-	if !strings.HasSuffix(msg, "\n") {
-		e = "\n"
-	}
-	if len(args) == 0 {
-		fmt.Fprint(os.Stderr, msg+e)
-	} else {
-		fmt.Fprintf(os.Stderr, msg+e, args...)
-	}
-}
-
-// Msg prints a message with optional arguments of varying types that can
-// be printed.
-func Msg(msg string, args ...interface{}) {
-	outputLock.Lock()
-	defer outputLock.Unlock()
-
-	// If messages don't have a newline on the end we add one.
-	e := ""
-	if !strings.HasSuffix(msg, "\n") {
-		e = "\n"
-	}
-	if len(args) == 0 {
-		fmt.Fprint(os.Stderr, msg+e)
-	} else {
-		fmt.Fprintf(os.Stderr, msg+e, args...)
-	}
-}
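Color above wraps the message in a plain ANSI escape sequence, so, assuming NoColor is false, the call from its doc comment expands as sketched here:

	Color(Yellow, "Foo") // returns "\033[0;33mFoo\033[m", i.e. "Foo" in yellow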
diff --git a/cmd/msg_windows.go b/cmd/msg_windows.go
deleted file mode 100644
index 6774ebd..0000000
--- a/cmd/msg_windows.go
+++ /dev/null
@@ -1,64 +0,0 @@
-// +build windows
-
-package cmd
-
-import (
-	"fmt"
-	"os"
-	"strings"
-)
-
-// Info logs information
-func Info(msg string, args ...interface{}) {
-	if Quiet {
-		return
-	}
-	fmt.Print("[INFO] ")
-	Msg(msg, args...)
-}
-
-// Debug logs debug information
-func Debug(msg string, args ...interface{}) {
-	if Quiet || !IsDebugging {
-		return
-	}
-	fmt.Print("[DEBUG] ")
-	Msg(msg, args...)
-}
-
-// Warn logs a warning
-func Warn(msg string, args ...interface{}) {
-	fmt.Fprint(os.Stderr, "[WARN] ")
-	ErrMsg(msg, args...)
-}
-
-// Error logs an error.
-func Error(msg string, args ...interface{}) {
-	fmt.Fprint(os.Stderr, "[ERROR] ")
-	ErrMsg(msg, args...)
-}
-
-// ErrMsg sends a message to Stderr
-func ErrMsg(msg string, args ...interface{}) {
-	if len(args) == 0 {
-		fmt.Fprint(os.Stderr, msg)
-		return
-	}
-	fmt.Fprintf(os.Stderr, msg, args...)
-}
-
-// Msg prints a message with optional arguments of varying types that can
-// be printed.
-func Msg(msg string, args ...interface{}) {
-	if len(args) == 0 {
-		fmt.Print(msg)
-		return
-	}
-	fmt.Printf(msg, args...)
-
-	// Get rid of the annoying fact that messages need \n at the end, but do
-	// it in a backward compatible way.
-	if !strings.HasSuffix(msg, "\n") {
-		fmt.Println("")
-	}
-}
diff --git a/cmd/novendor.go b/cmd/novendor.go
deleted file mode 100644
index b0adcd3..0000000
--- a/cmd/novendor.go
+++ /dev/null
@@ -1,134 +0,0 @@
-package cmd
-
-import (
-	"fmt"
-	"os"
-	"path/filepath"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-)
-
-// NoVendor takes a path and returns all subpaths that are not vendor directories.
-//
-// It is not recursive.
-//
-// If the given path is a file, it returns that path unaltered.
-//
-// If the given path is a directory, it scans all of the immediate children,
-// and returns all of the go files and directories that are not vendor.
-func NoVendor(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	path := p.Get("path", ".").(string)
-	gonly := p.Get("onlyGo", true).(bool)
-
-	return noVend(path, gonly)
-}
-
-// PathString takes a list of paths and prints a single string with space-separated paths.
-func PathString(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	paths := p.Get("paths", []string{}).([]string)
-	s := strings.Join(paths, " ")
-	fmt.Println(s)
-	return nil, nil
-}
-
-// noVend takes a directory and returns a list of Go-like files or directories,
-// provided the directory is not a vendor directory.
-//
-// If onlyGo is true, this will filter out all directories that do not contain
-// ".go" files.
-func noVend(path string, onlyGo bool) ([]string, error) {
-
-	info, err := os.Stat(path)
-	if err != nil {
-		return []string{}, err
-	}
-
-	if !info.IsDir() {
-		return []string{path}, nil
-	}
-
-	res := []string{}
-	f, err := os.Open(path)
-	if err != nil {
-		return res, err
-	}
-
-	fis, err := f.Readdir(0)
-	if err != nil {
-		return res, err
-	}
-
-	cur := false
-
-	for _, fi := range fis {
-		if exclude(fi) {
-			continue
-		}
-
-		full := filepath.Join(path, fi.Name())
-		if fi.IsDir() && !isVend(fi) {
-			p := "./" + full + "/..."
-			res = append(res, p)
-		} else if !fi.IsDir() && isGoish(fi) {
-			//res = append(res, full)
-			cur = true
-		}
-	}
-
-	// Filter out directories that do not contain Go code
-	if onlyGo {
-		res = hasGoSource(res)
-	}
-
-	if cur {
-		res = append(res, ".")
-	}
-
-	return res, nil
-}
-
-func hasGoSource(dirs []string) []string {
-	buf := []string{}
-	for _, d := range dirs {
-		d := filepath.Dir(d)
-		found := false
-		walker := func(p string, fi os.FileInfo, err error) error {
-			// Dumb optimization
-			if found {
-				return nil
-			}
-
-			// If the file ends with .go, report a match.
-			if strings.ToLower(filepath.Ext(p)) == ".go" {
-				found = true
-			}
-
-			return nil
-		}
-		filepath.Walk(d, walker)
-
-		if found {
-			buf = append(buf, "./"+d+"/...")
-		}
-	}
-	return buf
-}
-
-func isVend(fi os.FileInfo) bool {
-	return fi.Name() == "vendor"
-}
-
-func exclude(fi os.FileInfo) bool {
-	if strings.HasPrefix(fi.Name(), "_") {
-		return true
-	}
-	if strings.HasPrefix(fi.Name(), ".") {
-		return true
-	}
-	return false
-}
-
-func isGoish(fi os.FileInfo) bool {
-	return filepath.Ext(fi.Name()) == ".go"
-}
diff --git a/cmd/print_name.go b/cmd/print_name.go
deleted file mode 100644
index 6a6d26d..0000000
--- a/cmd/print_name.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package cmd
-
-import (
-	"fmt"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-)
-
-// PrintName prints the name of the project.
-//
-// This comes from Config.Name.
-func PrintName(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	conf := p.Get("conf", nil).(*cfg.Config)
-	fmt.Println(conf.Name)
-	return nil, nil
-}
diff --git a/cmd/rebuild.go b/cmd/rebuild.go
deleted file mode 100644
index 07a1449..0000000
--- a/cmd/rebuild.go
+++ /dev/null
@@ -1,99 +0,0 @@
-package cmd
-
-import (
-	"os"
-	"os/exec"
-	"path"
-	"path/filepath"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-)
-
-// Rebuild runs 'go build' in a directory.
-//
-// Params:
-// 	- conf: the *cfg.Config.
-//
-func Rebuild(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	conf := p.Get("conf", nil).(*cfg.Config)
-	vpath, err := VendorPath(c)
-	if err != nil {
-		return nil, err
-	}
-
-	Info("Building dependencies.\n")
-
-	if len(conf.Imports) == 0 {
-		Info("No dependencies found. Nothing built.\n")
-		return true, nil
-	}
-
-	for _, dep := range conf.Imports {
-		if err := buildDep(c, dep, vpath); err != nil {
-			Warn("Failed to build %s: %s\n", dep.Name, err)
-		}
-	}
-
-	return true, nil
-}
-
-func buildDep(c cookoo.Context, dep *cfg.Dependency, vpath string) error {
-	if len(dep.Subpackages) == 0 {
-		buildPath(c, dep.Name)
-	}
-
-	for _, pkg := range dep.Subpackages {
-		if pkg == "**" || pkg == "..." {
-			//Info("Building all packages in %s\n", dep.Name)
-			buildPath(c, path.Join(dep.Name, "..."))
-		} else {
-			paths, err := resolvePackages(vpath, dep.Name, pkg)
-			if err != nil {
-				Warn("Error resolving packages: %s", err)
-			}
-			buildPaths(c, paths)
-		}
-	}
-
-	return nil
-}
-
-func resolvePackages(vpath, pkg, subpkg string) ([]string, error) {
-	sdir, _ := os.Getwd()
-	if err := os.Chdir(filepath.Join(vpath, filepath.FromSlash(pkg), filepath.FromSlash(subpkg))); err != nil {
-		return []string{}, err
-	}
-	defer os.Chdir(sdir)
-	p, err := filepath.Glob(filepath.Join(vpath, filepath.FromSlash(pkg), filepath.FromSlash(subpkg)))
-	if err != nil {
-		return []string{}, err
-	}
-	for k, v := range p {
-		nv := strings.TrimPrefix(v, vpath)
-		p[k] = strings.TrimPrefix(nv, string(filepath.Separator))
-	}
-	return p, nil
-}
-
-func buildPaths(c cookoo.Context, paths []string) error {
-	for _, path := range paths {
-		if err := buildPath(c, path); err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
-
-func buildPath(c cookoo.Context, path string) error {
-	Info("Running go install %s\n", path)
-	// . in a filepath.Join is removed so it needs to be prepended separately.
-	p := "." + string(filepath.Separator) + filepath.Join("vendor", filepath.FromSlash(path))
-	out, err := exec.Command("go", "install", p).CombinedOutput()
-	if err != nil {
-		Warn("Failed to run 'go install' for %s: %s", path, string(out))
-	}
-	return err
-}
diff --git a/cmd/tree.go b/cmd/tree.go
deleted file mode 100644
index 252405e..0000000
--- a/cmd/tree.go
+++ /dev/null
@@ -1,331 +0,0 @@
-package cmd
-
-import (
-	"container/list"
-	"fmt"
-	"os"
-	"path/filepath"
-	"sort"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/dependency"
-	"github.com/Masterminds/glide/msg"
-	"github.com/Masterminds/glide/util"
-)
-
-// Tree prints a tree representing dependencies.
-func Tree(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	buildContext, err := util.GetBuildContext()
-	if err != nil {
-		return nil, err
-	}
-	showcore := p.Get("showcore", false).(bool)
-	basedir := p.Get("dir", ".").(string)
-	myName := guessPackageName(buildContext, basedir)
-
-	if basedir == "." {
-		var err error
-		basedir, err = os.Getwd()
-		if err != nil {
-			Error("Could not get working directory")
-			return nil, err
-		}
-	}
-
-	fmt.Println(myName)
-	l := list.New()
-	l.PushBack(myName)
-	displayTree(buildContext, basedir, myName, 1, showcore, l)
-	return nil, nil
-}
-
-// ListDeps lists all of the dependencies of the current project.
-//
-// Params:
-//  - dir (string): basedir
-//  - deep (bool): whether to do a deep scan or a shallow scan
-//
-// Returns:
-//
-func ListDeps(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	basedir := p.Get("dir", ".").(string)
-	deep := p.Get("deep", true).(bool)
-
-	basedir, err := filepath.Abs(basedir)
-	if err != nil {
-		return nil, err
-	}
-
-	r, err := dependency.NewResolver(basedir)
-	if err != nil {
-		return nil, err
-	}
-	h := &dependency.DefaultMissingPackageHandler{Missing: []string{}, Gopath: []string{}}
-	r.Handler = h
-
-	sortable, err := r.ResolveLocal(deep)
-	if err != nil {
-		return nil, err
-	}
-
-	sort.Strings(sortable)
-
-	fmt.Println("INSTALLED packages:")
-	for _, k := range sortable {
-		v, err := filepath.Rel(basedir, k)
-		if err != nil {
-			msg.Warn("Failed to Rel path: %s", err)
-			v = k
-		}
-		fmt.Printf("\t%s\n", v)
-	}
-
-	if len(h.Missing) > 0 {
-		fmt.Println("\nMISSING packages:")
-		for _, pkg := range h.Missing {
-			fmt.Printf("\t%s\n", pkg)
-		}
-	}
-	if len(h.Gopath) > 0 {
-		fmt.Println("\nGOPATH packages:")
-		for _, pkg := range h.Gopath {
-			fmt.Printf("\t%s\n", pkg)
-		}
-	}
-
-	return nil, nil
-}
-
-func listDeps(b *util.BuildCtxt, info map[string]*pinfo, name, path string) {
-	found := findPkg(b, name, path)
-	switch found.PType {
-	case ptypeUnknown:
-		info[name] = found
-		break
-	case ptypeGoroot, ptypeCgo:
-		break
-	default:
-		info[name] = found
-		for _, i := range walkDeps(b, found.Path, found.Name) {
-			// Only walk the deps that are not already found to avoid
-			// infinite recursion.
-			if _, f := info[found.Name]; f == false {
-				listDeps(b, info, i, found.Path)
-			}
-		}
-	}
-}
-
-func displayTree(b *util.BuildCtxt, basedir, myName string, level int, core bool, l *list.List) {
-	deps := walkDeps(b, basedir, myName)
-	for _, name := range deps {
-		found := findPkg(b, name, basedir)
-		if found.PType == ptypeUnknown {
-			msg := "glide get " + found.Name
-			fmt.Printf("\t%s\t(%s)\n", found.Name, msg)
-			continue
-		}
-		if !core && found.PType == ptypeGoroot || found.PType == ptypeCgo {
-			continue
-		}
-		fmt.Print(strings.Repeat("\t", level))
-
-		f := findInList(found.Name, l)
-		if f == true {
-			fmt.Printf("(Recursion) %s   (%s)\n", found.Name, found.Path)
-		} else {
-			// Every branch in the tree is a copy to handle all the branches
-			cl := copyList(l)
-			cl.PushBack(found.Name)
-			fmt.Printf("%s   (%s)\n", found.Name, found.Path)
-			displayTree(b, found.Path, found.Name, level+1, core, cl)
-		}
-	}
-}
-
-type ptype int8
-
-const (
-	ptypeUnknown ptype = iota
-	ptypeLocal
-	ptypeVendor
-	ptypeGopath
-	ptypeGoroot
-	ptypeCgo
-)
-
-func ptypeString(t ptype) string {
-	switch t {
-	case ptypeLocal:
-		return "local"
-	case ptypeVendor:
-		return "vendored"
-	case ptypeGopath:
-		return "gopath"
-	case ptypeGoroot:
-		return "core"
-	case ptypeCgo:
-		return "cgo"
-	default:
-		return "missing"
-	}
-}
-
-type pinfo struct {
-	Name, Path string
-	PType      ptype
-	Vendored   bool
-}
-
-func findPkg(b *util.BuildCtxt, name, cwd string) *pinfo {
-	var fi os.FileInfo
-	var err error
-	var p string
-
-	info := &pinfo{
-		Name: name,
-	}
-
-	// Recurse backward to scan other vendor/ directories
-	// If the cwd isn't an absolute path walking upwards looking for vendor/
-	// folders can get into an infinate loop.
-	abs, err := filepath.Abs(cwd)
-	if err != nil {
-		abs = cwd
-	}
-	if abs != "." {
-
-		// Previously there was a check on the loop that wd := "/". The path
-		// "/" is a POSIX path so this fails on Windows. Now the check is to
-		// make sure the same wd isn't seen twice. When the same wd happens
-		// more than once it's the beginning of looping on the same location
-		// which is the top level.
-		pwd := ""
-		for wd := abs; wd != pwd; wd = filepath.Dir(wd) {
-			pwd = wd
-
-			// Don't look for packages outside the GOPATH
-			// Note, the GOPATH may or may not end with the path separator.
-			// The output of filepath.Dir does not the the path separator on the
-			// end so we need to test both.
-			if wd == b.GOPATH || wd+string(os.PathSeparator) == b.GOPATH {
-				break
-			}
-			p = filepath.Join(wd, "vendor", filepath.FromSlash(name))
-			if fi, err = os.Stat(p); err == nil && (fi.IsDir() || isLink(fi)) {
-				info.Path = p
-				info.PType = ptypeVendor
-				info.Vendored = true
-				return info
-			}
-		}
-	}
-	// Check $GOPATH
-	for _, r := range filepath.SplitList(b.GOPATH) {
-		p = filepath.Join(r, "src", filepath.FromSlash(name))
-		if fi, err = os.Stat(p); err == nil && (fi.IsDir() || isLink(fi)) {
-			info.Path = p
-			info.PType = ptypeGopath
-			return info
-		}
-	}
-
-	// Check $GOROOT
-	for _, r := range filepath.SplitList(b.GOROOT) {
-		p = filepath.Join(r, "src", filepath.FromSlash(name))
-		if fi, err = os.Stat(p); err == nil && (fi.IsDir() || isLink(fi)) {
-			info.Path = p
-			info.PType = ptypeGoroot
-			return info
-		}
-	}
-
-	// Finally, if this is "C", we're dealing with cgo
-	if name == "C" {
-		info.PType = ptypeCgo
-	}
-
-	return info
-}
-
-func isLink(fi os.FileInfo) bool {
-	return fi.Mode()&os.ModeSymlink == os.ModeSymlink
-}
-
-func walkDeps(b *util.BuildCtxt, base, myName string) []string {
-	externalDeps := []string{}
-	filepath.Walk(base, func(path string, fi os.FileInfo, err error) error {
-		if excludeSubtree(path, fi) {
-			if fi.IsDir() {
-				return filepath.SkipDir
-			}
-			return nil
-		}
-
-		pkg, err := b.ImportDir(path, 0)
-		if err != nil {
-			if !strings.HasPrefix(err.Error(), "no buildable Go source") {
-				Warn("Error: %s (%s)", err, path)
-				// Not sure if we should return here.
-				//return err
-			}
-		}
-
-		if pkg.Goroot {
-			return nil
-		}
-
-		for _, imp := range pkg.Imports {
-			//if strings.HasPrefix(imp, myName) {
-			////Info("Skipping %s because it is a subpackage of %s", imp, myName)
-			//continue
-			//}
-			if imp == myName {
-				continue
-			}
-			externalDeps = append(externalDeps, imp)
-		}
-
-		return nil
-	})
-	return externalDeps
-}
-
-func excludeSubtree(path string, fi os.FileInfo) bool {
-	top := filepath.Base(path)
-
-	if !fi.IsDir() && !isLink(fi) {
-		return true
-	}
-
-	// Provisionally, we'll skip vendor. We definitely
-	// should skip testdata.
-	if top == "vendor" || top == "testdata" {
-		return true
-	}
-
-	// Skip anything that starts with _
-	if strings.HasPrefix(top, "_") || (strings.HasPrefix(top, ".") && top != ".") {
-		return true
-	}
-	return false
-}
-
-func copyList(l *list.List) *list.List {
-	n := list.New()
-	for e := l.Front(); e != nil; e = e.Next() {
-		n.PushBack(e.Value.(string))
-	}
-	return n
-}
-
-func findInList(n string, l *list.List) bool {
-	for e := l.Front(); e != nil; e = e.Next() {
-		if e.Value.(string) == n {
-			return true
-		}
-	}
-
-	return false
-}
diff --git a/cmd/tree_test.go b/cmd/tree_test.go
deleted file mode 100644
index afa6667..0000000
--- a/cmd/tree_test.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package cmd
-
-import (
-	"container/list"
-	"testing"
-)
-
-func TestFindInTree(t *testing.T) {
-	l := list.New()
-	l.PushBack("github.com/Masterminds/glide")
-	l.PushBack("github.com/Masterminds/vcs")
-	l.PushBack("github.com/Masterminds/semver")
-
-	f := findInList("foo", l)
-	if f != false {
-		t.Error("findInList found true instead of false")
-	}
-
-	f = findInList("github.com/Masterminds/vcs", l)
-	if f != true {
-		t.Error("findInList found false instead of true")
-	}
-}
diff --git a/cmd/util.go b/cmd/util.go
deleted file mode 100644
index 6744173..0000000
--- a/cmd/util.go
+++ /dev/null
@@ -1,215 +0,0 @@
-package cmd
-
-import (
-	"fmt"
-	"io"
-	"os"
-	"os/exec"
-	"path/filepath"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-)
-
-// Quiet, when set to true, can suppress Info and Debug messages.
-var Quiet = false
-var IsDebugging = false
-var NoColor = false
-
-// BeQuiet supresses Info and Debug messages.
-func BeQuiet(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	Quiet = p.Get("quiet", false).(bool)
-	IsDebugging = p.Get("debug", false).(bool)
-	return Quiet, nil
-}
-
-// CheckColor turns off the colored output (and uses plain text output) for
-// logging depending on the value of the "no-color" flag.
-func CheckColor(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	NoColor = p.Get("no-color", false).(bool)
-	return NoColor, nil
-}
-
-// ReadyToGlide fails if the environment is not sufficient for using glide.
-//
-// Most importantly, it fails if glide.yaml is not present in the current
-// working directory.
-func ReadyToGlide(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	fname := p.Get("filename", "glide.yaml").(string)
-	if _, err := os.Stat(fname); err != nil {
-		cwd, _ := os.Getwd()
-		return false, fmt.Errorf("%s is missing from %s", fname, cwd)
-	}
-	return true, nil
-}
-
-// VersionGuard ensures that the Go version is correct.
-func VersionGuard(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	// 6l was removed in 1.5, when vendoring was introduced.
-	cmd := exec.Command("go", "tool", "6l")
-	var out string
-	if _, err := cmd.CombinedOutput(); err == nil {
-		Warn("You must install the Go 1.5 or greater toolchain to work with Glide.\n")
-	}
-	if os.Getenv("GO15VENDOREXPERIMENT") != "1" {
-		Warn("To use Glide, you must set GO15VENDOREXPERIMENT=1\n")
-	}
-
-	// Verify the setup isn't for the old version of glide. That is, this is
-	// no longer assuming the _vendor directory as the GOPATH. Inform of
-	// the change.
-	if _, err := os.Stat("_vendor/"); err == nil {
-		Warn(`Your setup appears to be for the previous version of Glide.
-Previously, vendor packages were stored in _vendor/src/ and
-_vendor was set as your GOPATH. As of Go 1.5 the go tools
-recognize the vendor directory as a location for these
-files. Glide has embraced this. Please remove the _vendor
-directory or move the _vendor/src/ directory to vendor/.` + "\n")
-	}
-
-	return out, nil
-}
-
-// CowardMode checks that the environment is setup before continuing on. If not
-// setup and error is returned.
-func CowardMode(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	gopath := Gopath()
-	if gopath == "" {
-		return false, fmt.Errorf("No GOPATH is set.\n")
-	}
-
-	_, err := os.Stat(filepath.Join(gopath, "src"))
-	if err != nil {
-		Error("Could not find %s/src. The GOPATH does not appear to be properly setup.\n", gopath)
-		Info("As of Glide 0.5/Go 1.5, this is required.\n")
-		return false, err
-	}
-
-	return true, nil
-}
-
-// Check if a directory is empty or not.
-func isDirectoryEmpty(dir string) (bool, error) {
-	f, err := os.Open(dir)
-	if err != nil {
-		return false, err
-	}
-	defer f.Close()
-
-	_, err = f.Readdir(1)
-
-	if err == io.EOF {
-		return true, nil
-	}
-
-	return false, err
-}
-
-// Gopath gets GOPATH from environment and return the most relevant path.
-//
-// A GOPATH can contain a colon-separated list of paths. This retrieves the
-// GOPATH and returns only the FIRST ("most relevant") path.
-//
-// This should be used carefully. If, for example, you are looking for a package,
-// you may be better off using Gopaths.
-func Gopath() string {
-	gopaths := Gopaths()
-	if len(gopaths) == 0 {
-		return ""
-	}
-	return gopaths[0]
-}
-
-// Gopaths retrieves the Gopath as a list when there is more than one path
-// listed in the Gopath.
-func Gopaths() []string {
-	p := os.Getenv("GOPATH")
-	p = strings.Trim(p, string(filepath.ListSeparator))
-	return filepath.SplitList(p)
-}
-
-func fileExist(name string) (bool, error) {
-	_, err := os.Stat(name)
-	if err == nil {
-		return true, nil
-	}
-	if os.IsNotExist(err) {
-		return false, nil
-	}
-	return true, err
-}
-
-// We copy the directory here rather than jumping out to a shell so we can
-// support multiple operating systems.
-func copyDir(source string, dest string) error {
-
-	// get properties of source dir
-	si, err := os.Stat(source)
-	if err != nil {
-		return err
-	}
-
-	err = os.MkdirAll(dest, si.Mode())
-	if err != nil {
-		return err
-	}
-
-	d, _ := os.Open(source)
-
-	objects, err := d.Readdir(-1)
-
-	for _, obj := range objects {
-
-		sp := filepath.Join(source, obj.Name())
-
-		dp := filepath.Join(dest, obj.Name())
-
-		if obj.IsDir() {
-			err = copyDir(sp, dp)
-			if err != nil {
-				return err
-			}
-		} else {
-			// perform copy
-			err = copyFile(sp, dp)
-			if err != nil {
-				return err
-			}
-		}
-
-	}
-	return nil
-}
-
-func copyFile(source string, dest string) error {
-	ln, err := os.Readlink(source)
-	if err == nil {
-		return os.Symlink(ln, dest)
-	}
-	s, err := os.Open(source)
-	if err != nil {
-		return err
-	}
-
-	defer s.Close()
-
-	d, err := os.Create(dest)
-	if err != nil {
-		return err
-	}
-
-	defer d.Close()
-
-	_, err = io.Copy(d, s)
-	if err != nil {
-		return err
-	}
-
-	si, err := os.Stat(source)
-	if err != nil {
-		return err
-	}
-	err = os.Chmod(dest, si.Mode())
-
-	return err
-}
diff --git a/cmd/util_test.go b/cmd/util_test.go
deleted file mode 100644
index 89a041a..0000000
--- a/cmd/util_test.go
+++ /dev/null
@@ -1,42 +0,0 @@
-package cmd
-
-import (
-	"io/ioutil"
-	"os"
-	"testing"
-)
-
-func TestisDirectoryEmpty(t *testing.T) {
-	tempDir, err := ioutil.TempDir("", "empty-dir-test")
-	if err != nil {
-		t.Error(err)
-	}
-	defer func() {
-		err = os.RemoveAll(tempDir)
-		if err != nil {
-			t.Error(err)
-		}
-	}()
-
-	empty, err := isDirectoryEmpty(tempDir)
-	if err != nil {
-		t.Error(err)
-	}
-	if empty == false {
-		t.Error("isDirectoryEmpty reporting false on empty directory")
-	}
-
-	data := "foo bar baz"
-	err = ioutil.WriteFile(tempDir+"/foo", []byte(data), 0644)
-	if err != nil {
-		t.Error(err)
-	}
-
-	empty, err = isDirectoryEmpty(tempDir)
-	if err != nil {
-		t.Error(err)
-	}
-	if empty == true {
-		t.Error("isDirectoryEmpty reporting true on non-empty directory")
-	}
-}
diff --git a/cmd/vendor_path.go b/cmd/vendor_path.go
deleted file mode 100644
index 0b3dc75..0000000
--- a/cmd/vendor_path.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package cmd
-
-import (
-	"fmt"
-	"os"
-	"path/filepath"
-
-	"github.com/Masterminds/cookoo"
-)
-
-// Return the path to the vendor directory.
-func VendorPath(c cookoo.Context) (string, error) {
-	vendor := c.Get("VendorDir", "vendor").(string)
-	filename := c.Get("yaml", "glide.yaml").(string)
-	cwd, err := os.Getwd()
-	if err != nil {
-		return "", err
-	}
-
-	// Find the directory that contains glide.yaml
-	yamldir, err := glideWD(cwd, filename)
-	if err != nil {
-		return cwd, err
-	}
-
-	gopath := filepath.Join(yamldir, vendor)
-
-	return gopath, nil
-}
-
-func glideWD(dir, filename string) (string, error) {
-	fullpath := filepath.Join(dir, filename)
-
-	if _, err := os.Stat(fullpath); err == nil {
-		return dir, nil
-	}
-
-	base := filepath.Dir(dir)
-	if base == dir {
-		return "", fmt.Errorf("Cannot resolve parent of %s", base)
-	}
-
-	return glideWD(base, filename)
-}
diff --git a/cmd/vendor_path_test.go b/cmd/vendor_path_test.go
deleted file mode 100644
index 258b8b5..0000000
--- a/cmd/vendor_path_test.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package cmd
-
-import (
-	"os"
-	"path/filepath"
-	"testing"
-)
-
-func TestGlideWD(t *testing.T) {
-	cwd, _ := os.Getwd()
-	filename := "glide.yaml"
-	found, err := glideWD(cwd, filename)
-	if err != nil {
-		t.Errorf("Failed to get Glide directory: %s", err)
-	}
-
-	if found != filepath.Dir(cwd) {
-		t.Errorf("Expected %s to match %s", found, filepath.Base(cwd))
-	}
-
-	// This should fail
-	cwd = "/No/Such/Dir"
-	found, err = glideWD(cwd, filename)
-	if err == nil {
-		t.Errorf("Expected to get an error on a non-existent directory, not %s", found)
-	}
-
-}
diff --git a/cmd/vendored.go b/cmd/vendored.go
deleted file mode 100644
index 6dfbda0..0000000
--- a/cmd/vendored.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package cmd
-
-import (
-	"os"
-	"path/filepath"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-)
-
-// If we are updating the vendored dependencies. That is those stored in the
-// local project VCS.
-var updatingVendored = false
-
-// VendoredSetup is a command that does the setup for vendored directories.
-// If enabled (via update) it marks vendored directories that are being updated
-// and removed the old code. This should be a prefix to UpdateImports and
-// VendoredCleanUp should be a suffix to UpdateImports.
-func VendoredSetup(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	update := p.Get("update", false).(bool)
-	conf := p.Get("conf", nil).(*cfg.Config)
-
-	updatingVendored = update
-
-	return conf, nil
-}
-
-// VendoredCleanUp is a command that cleans up vendored codebases after an update.
-// If enabled (via update) it removes the VCS info from updated vendored
-// packages. This should be a suffix to UpdateImports and  VendoredSetup should
-// be a prefix to UpdateImports.
-func VendoredCleanUp(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	update := p.Get("update", true).(bool)
-	if update != true {
-		return false, nil
-	}
-	conf := p.Get("conf", nil).(*cfg.Config)
-
-	vend, err := VendorPath(c)
-	if err != nil {
-		return false, err
-	}
-
-	for _, dep := range conf.Imports {
-		if dep.UpdateAsVendored == true {
-			Info("Cleaning up vendored package %s\n", dep.Name)
-
-			// Remove the VCS directory
-			cwd := filepath.Join(vend, filepath.FromSlash(dep.Name))
-			repo, err := dep.GetRepo(cwd)
-			if err != nil {
-				Error("Error cleaning up %s:%s", dep.Name, err)
-				continue
-			}
-			t := repo.Vcs()
-			err = os.RemoveAll(cwd + string(os.PathSeparator) + "." + string(t))
-			if err != nil {
-				Error("Error cleaning up VCS dir for %s:%s", dep.Name, err)
-			}
-		}
-
-	}
-
-	return true, nil
-}
diff --git a/cmd/yaml.go b/cmd/yaml.go
deleted file mode 100644
index f05a1e9..0000000
--- a/cmd/yaml.go
+++ /dev/null
@@ -1,196 +0,0 @@
-package cmd
-
-import (
-	"fmt"
-	"io"
-	"io/ioutil"
-	"os"
-	"path/filepath"
-	"strings"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-	"github.com/Masterminds/glide/util"
-)
-
-// ParseYaml parses the glide.yaml format and returns a Configuration object.
-//
-// Params:
-//	- filename (string): YAML filename as a string
-//
-// Returns:
-//	- *cfg.Config: The configuration.
-func ParseYaml(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	fname := p.Get("filename", "glide.yaml").(string)
-	//conf := new(Config)
-	yml, err := ioutil.ReadFile(fname)
-	if err != nil {
-		return nil, err
-	}
-	conf, err := cfg.ConfigFromYaml(yml)
-	if err != nil {
-		return nil, err
-	}
-
-	return conf, nil
-}
-
-// ParseYamlString parses a YAML string. This is similar but different to
-// ParseYaml that parses an external file.
-//
-// Params:
-//	- yaml (string): YAML as a string.
-//
-// Returns:
-//	- *cfg.Config: The configuration.
-func ParseYamlString(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	yamlString := p.Get("yaml", "").(string)
-
-	conf, err := cfg.ConfigFromYaml([]byte(yamlString))
-	if err != nil {
-		return nil, err
-	}
-
-	return conf, nil
-}
-
-// GuardYaml protects the glide yaml file from being overwritten.
-func GuardYaml(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	fname := p.Get("filename", "glide.yaml").(string)
-	if _, err := os.Stat(fname); err == nil {
-		cwd, _ := os.Getwd()
-		return false, fmt.Errorf("Cowardly refusing to overwrite %s in %s", fname, cwd)
-	}
-
-	return true, nil
-}
-
-// WriteYaml writes the config as YAML.
-//
-// Params:
-//	- conf: A *cfg.Config to render.
-// 	- out (io.Writer): An output stream to write to. Default is os.Stdout.
-// 	- filename (string): If set, the file will be opened and the content will be written to it.
-func WriteYaml(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	conf := p.Get("conf", nil).(*cfg.Config)
-	toStdout := p.Get("toStdout", true).(bool)
-
-	data, err := conf.Marshal()
-	if err != nil {
-		return nil, err
-	}
-
-	var out io.Writer
-	if nn, ok := p.Has("filename"); ok && len(nn.(string)) > 0 {
-		file, err := os.Create(nn.(string))
-		if err != nil {
-		}
-		defer file.Close()
-		out = io.Writer(file)
-		//fmt.Fprint(out, yml)
-		out.Write(data)
-	} else if toStdout {
-		out = p.Get("out", os.Stdout).(io.Writer)
-		//fmt.Fprint(out, yml)
-		out.Write(data)
-	}
-
-	// Otherwise we supress output.
-	return true, nil
-}
-
-// WriteLock writes the lock as YAML.
-//
-// Params:
-//	- lockfile: A *cfg.Lockfile to render.
-// 	- out (io.Writer): An output stream to write to. Default is os.Stdout.
-func WriteLock(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	skip := p.Get("skip", false).(bool)
-	if skip {
-		return false, nil
-	}
-
-	lockfile := p.Get("lockfile", nil).(*cfg.Lockfile)
-
-	Info("Writing glide.lock file")
-
-	data, err := lockfile.Marshal()
-	if err != nil {
-		return nil, err
-	}
-
-	var out io.Writer
-	file, err := os.Create("glide.lock")
-	if err != nil {
-		return false, err
-	}
-	defer file.Close()
-	out = io.Writer(file)
-	out.Write(data)
-
-	return true, nil
-}
-
-// AddDependencies adds a list of *Dependency objects to the given *cfg.Config.
-//
-// This is used to merge in packages from other sources or config files.
-func AddDependencies(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	deps := p.Get("dependencies", []*cfg.Dependency{}).([]*cfg.Dependency)
-	config := p.Get("conf", nil).(*cfg.Config)
-
-	// Make a set of existing package names for quick comparison.
-	pkgSet := make(map[string]bool, len(config.Imports))
-	for _, p := range config.Imports {
-		pkgSet[p.Name] = true
-	}
-
-	// If a dep is not already present, add it.
-	for _, dep := range deps {
-		if _, ok := pkgSet[dep.Name]; ok {
-			Warn("Package %s is already in glide.yaml. Skipping.\n", dep.Name)
-			continue
-		}
-		config.Imports = append(config.Imports, dep)
-	}
-
-	return true, nil
-}
-
-// NormalizeName takes a package name and normalizes it to the top level package.
-//
-// For example, golang.org/x/crypto/ssh becomes golang.org/x/crypto. 'ssh' is
-// returned as extra data.
-func NormalizeName(name string) (string, string) {
-
-	// Fastpath check if a name in the GOROOT. There is an issue when a pkg
-	// is in the GOROOT and GetRootFromPackage tries to look it up because it
-	// expects remote names.
-	b, err := util.GetBuildContext()
-	if err == nil {
-		p := filepath.Join(b.GOROOT, "src", filepath.FromSlash(name))
-		if _, err := os.Stat(p); err == nil {
-			return name, ""
-		}
-	}
-
-	root := util.GetRootFromPackage(name)
-	extra := strings.TrimPrefix(name, root)
-	if len(extra) > 0 && extra != "/" {
-		extra = strings.TrimPrefix(extra, "/")
-	} else {
-		// If extra is / (which is what it would be here) we want to return ""
-		extra = ""
-	}
-
-	return root, extra
-
-	// parts := strings.SplitN(name, "/", 4)
-	// extra := ""
-	// if len(parts) < 3 {
-	// 	return name, extra
-	// }
-	// if len(parts) == 4 {
-	// 	extra = parts[3]
-	// }
-	// return strings.Join(parts[0:3], "/"), extra
-}
diff --git a/cmd/yaml_test.go b/cmd/yaml_test.go
deleted file mode 100644
index a1e3e6b..0000000
--- a/cmd/yaml_test.go
+++ /dev/null
@@ -1,124 +0,0 @@
-package cmd
-
-import (
-	"testing"
-
-	"github.com/Masterminds/cookoo"
-	"github.com/Masterminds/glide/cfg"
-)
-
-var yamlFile = `
-package: fake/testing
-import:
-  - package: github.com/kylelemons/go-gypsy
-    subpackages:
-      - yaml
-  # Intentionally left spaces at end of next line.
-  - package: github.com/Masterminds/convert
-    repo: git@github.com:Masterminds/convert.git
-    ref: a9949121a2e2192ca92fa6dddfeaaa4a4412d955
-    subpackages:
-      - color
-      - nautical
-      - radial
-    os:
-      - linux
-    arch:
-      - i386
-      - arm
-  - package: github.com/Masterminds/structable
-
-devimport:
-  - package: github.com/kylelemons/go-gypsy
-`
-
-func TestFromYaml(t *testing.T) {
-	reg, router, cxt := cookoo.Cookoo()
-
-	reg.Route("t", "Testing").
-		Does(ParseYamlString, "cfg").Using("yaml").WithDefault(yamlFile)
-
-	if err := router.HandleRequest("t", cxt, false); err != nil {
-		t.Errorf("Failed to parse YAML: %s", err)
-	}
-
-	conf := cxt.Get("cfg", nil).(*cfg.Config)
-
-	if conf.Name != "fake/testing" {
-		t.Errorf("Expected name to be 'fake/teting', not '%s'", conf.Name)
-	}
-
-	if len(conf.Imports) != 3 {
-		t.Errorf("Expected 3 imports, got %d", len(conf.Imports))
-	}
-
-	if conf.Imports.Get("github.com/Masterminds/convert") == nil {
-		t.Error("Expected Imports.Get to return Dependency")
-	}
-
-	if conf.Imports.Get("github.com/doesnot/exist") != nil {
-		t.Error("Execpted Imports.Get to return nil")
-	}
-
-	var imp *cfg.Dependency
-	for _, d := range conf.Imports {
-		if d.Name == "github.com/Masterminds/convert" {
-			imp = d
-		}
-	}
-
-	if imp == nil {
-		t.Errorf("Expected the convert package, got nothing")
-	}
-
-	if len(imp.Subpackages) != 3 {
-		t.Errorf("Expected 3 subpackages. got %d", len(imp.Subpackages))
-	}
-
-	if imp.Subpackages[0] != "color" {
-		t.Errorf("Expected first subpackage to be 'color', got '%s'", imp.Subpackages[0])
-	}
-
-	if len(imp.Os) != 1 {
-		t.Errorf("Expected Os: SOMETHING")
-	} else if imp.Os[0] != "linux" {
-		t.Errorf("Expected Os: linux")
-	}
-
-	if len(imp.Arch) != 2 {
-		t.Error("Expected two Archs.")
-	} else if imp.Arch[0] != "i386" {
-		t.Errorf("Expected arch 1 to be i386, got %s.", imp.Arch[0])
-	} else if imp.Arch[1] != "arm" {
-		t.Error("Expected arch 2 to be arm.")
-	}
-
-	if imp.Repository != "git@github.com:Masterminds/convert.git" {
-		t.Errorf("Got wrong repo %s on %s", imp.Repository, imp.Name)
-	}
-	if imp.Reference != "a9949121a2e2192ca92fa6dddfeaaa4a4412d955" {
-		t.Errorf("Got wrong reference.")
-	}
-
-	if len(conf.DevImports) != 1 {
-		t.Errorf("Expected one dev import.")
-	}
-
-}
-
-func TestNormalizeName(t *testing.T) {
-	packages := map[string]string{
-		"github.com/Masterminds/cookoo/web/io/foo": "github.com/Masterminds/cookoo",
-		"golang.org/x/crypto/ssh":                  "golang.org/x/crypto",
-		//"technosophos.me/x/totally/fake/package":   "technosophos.me/x/totally",
-		"incomplete/example": "incomplete/example",
-		"net":                "net",
-	}
-	for start, expected := range packages {
-		if finish, extra := NormalizeName(start); expected != finish {
-			t.Errorf("Expected '%s', got '%s'", expected, finish)
-		} else if start != finish && start != finish+"/"+extra {
-			t.Errorf("Expected %s to end with %s", finish, extra)
-		}
-	}
-}
diff --git a/cmd/delete.go b/dependency/delete.go
similarity index 75%
rename from cmd/delete.go
rename to dependency/delete.go
index 202377a..39a6367 100644
--- a/cmd/delete.go
+++ b/dependency/delete.go
@@ -1,4 +1,4 @@
-package cmd
+package dependency
 
 import (
 	"errors"
@@ -6,38 +6,32 @@
 	"path/filepath"
 	"strings"
 
-	"github.com/Masterminds/cookoo"
 	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
 )
 
-// DeleteUnusedPackages removes packages from vendor/ that are no longer used.
-func DeleteUnusedPackages(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	// Conditional opt-in to removed unused dependencies.
-	optIn := p.Get("optIn", false).(bool)
-	if optIn != true {
-		return nil, nil
-	}
-
-	vpath, err := VendorPath(c)
+// DeleteUnused removes packages from vendor/ that are no longer used.
+//
+// TODO: This should work off of a Lock file, not glide.yaml.
+func DeleteUnused(conf *cfg.Config) error {
+	vpath, err := gpath.Vendor()
 	if err != nil {
-		return nil, err
+		return err
 	}
 	if vpath == "" {
-		return false, errors.New("Vendor not set")
+		return errors.New("Vendor not set")
 	}
 
 	// Build directory tree of what to keep.
-	cfg := p.Get("conf", nil).(*cfg.Config)
 	var pkgList []string
-	for _, dep := range cfg.Imports {
+	for _, dep := range conf.Imports {
 		pkgList = append(pkgList, dep.Name)
 	}
 
-	// Callback function for filepath.Walk to delete packages not in yaml file.
 	var searchPath string
-
 	var markForDelete []string
-
+	// Callback function for filepath.Walk to delete packages not in yaml file.
 	fn := func(path string, info os.FileInfo, err error) error {
 		// Bubble up the error
 		if err != nil {
@@ -49,7 +43,6 @@
 		}
 
 		localPath := strings.TrimPrefix(path, searchPath)
-
 		keep := false
 
 		// First check if the path has a prefix that's a specific package. If
@@ -93,18 +86,18 @@
 	searchPath = vpath + string(os.PathSeparator)
 	err = filepath.Walk(searchPath, fn)
 	if err != nil {
-		return false, err
+		return err
 	}
 
 	// Perform the actual delete.
 	for _, path := range markForDelete {
 		localPath := strings.TrimPrefix(path, searchPath)
-		Info("Removing unused package: %s\n", localPath)
+		msg.Info("Removing unused package: %s\n", localPath)
 		rerr := os.RemoveAll(path)
 		if rerr != nil {
-			return false, rerr
+			return rerr
 		}
 	}
 
-	return nil, nil
+	return nil
 }
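
The rename above turns the old cookoo command into a plain library function. As a rough, illustrative sketch only (assuming a glide.yaml in the working directory and reusing the existing cfg.ConfigFromYaml helper), it could be driven like this:

package main

import (
	"io/ioutil"
	"log"

	"github.com/Masterminds/glide/cfg"
	"github.com/Masterminds/glide/dependency"
)

func main() {
	// Parse glide.yaml into a *cfg.Config, much as the old cmd.ParseYaml did.
	yml, err := ioutil.ReadFile("glide.yaml")
	if err != nil {
		log.Fatalf("could not read glide.yaml: %s", err)
	}
	conf, err := cfg.ConfigFromYaml(yml)
	if err != nil {
		log.Fatalf("could not parse glide.yaml: %s", err)
	}

	// Remove anything under vendor/ that the config no longer imports.
	if err := dependency.DeleteUnused(conf); err != nil {
		log.Fatalf("cleanup failed: %s", err)
	}
}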
diff --git a/dependency/resolver.go b/dependency/resolver.go
index fcd4ddd..e08f738 100644
--- a/dependency/resolver.go
+++ b/dependency/resolver.go
@@ -69,6 +69,40 @@
 	return false, nil
 }
 
+// VersionHandler sets the version for a package when found while scanning.
+//
+// When a package is found, it needs to be on the correct version before
+// scanning its contents to be sure to pick up the right elements for that
+// version.
+type VersionHandler interface {
+
+	// Process provides an opportunity to process the codebase for version setting.
+	Process(pkg string) error
+
+	// SetVersion sets the version for a package. An error is returned if there
+	// was a problem setting the version.
+	SetVersion(pkg string) error
+}
+
+// DefaultVersionHandler is the default handler for setting the version.
+//
+// The default handler leaves the current version and skips setting a version.
+// For a handler that alters the version, see the handler included in the repo
+// package as part of the installer.
+type DefaultVersionHandler struct{}
+
+// Process a package to aid in version setting.
+func (d *DefaultVersionHandler) Process(pkg string) error {
+	return nil
+}
+
+// SetVersion sends a message when a package is found, noting that the
+// default handler did not set a version for it.
+func (d *DefaultVersionHandler) SetVersion(pkg string) error {
+	msg.Warn("Version not set for package %s", pkg)
+	return nil
+}
+
 // Resolver resolves a dependency tree.
 //
 // It operates in two modes:
@@ -79,11 +113,13 @@
 // Local resolution is for guessing initial dependencies. Vendor resolution is
 // for determining vendored dependencies.
 type Resolver struct {
-	Handler      MissingPackageHandler
-	basedir      string
-	VendorDir    string
-	BuildContext *util.BuildCtxt
-	seen         map[string]bool
+	Handler        MissingPackageHandler
+	VersionHandler VersionHandler
+	basedir        string
+	VendorDir      string
+	BuildContext   *util.BuildCtxt
+	seen           map[string]bool
+	Config         *cfg.Config
 
 	// Items already in the queue.
 	alreadyQ map[string]bool
@@ -114,13 +150,17 @@
 	}
 
 	r := &Resolver{
-		Handler:      &DefaultMissingPackageHandler{Missing: []string{}, Gopath: []string{}},
-		basedir:      basedir,
-		VendorDir:    vdir,
-		BuildContext: buildContext,
-		seen:         map[string]bool{},
-		alreadyQ:     map[string]bool{},
-		findCache:    map[string]*PkgInfo{},
+		Handler:        &DefaultMissingPackageHandler{Missing: []string{}, Gopath: []string{}},
+		VersionHandler: &DefaultVersionHandler{},
+		basedir:        basedir,
+		VendorDir:      vdir,
+		BuildContext:   buildContext,
+		seen:           map[string]bool{},
+		alreadyQ:       map[string]bool{},
+		findCache:      map[string]*PkgInfo{},
+
+		// The config instance here should really be replaced with a real one.
+		Config: &cfg.Config{},
 	}
 
 	// TODO: Make sure the build context is correctly set up. Especially in
@@ -246,6 +286,12 @@
 	var failedDep string
 	for e := queue.Front(); e != nil; e = e.Next() {
 		dep := e.Value.(string)
+		t := strings.TrimPrefix(e.Value.(string), r.VendorDir+string(os.PathSeparator))
+		if r.Config.HasIgnore(t) {
+			msg.Info("Ignoring: %s", t)
+			continue
+		}
+		r.VersionHandler.Process(t)
 		//msg.Warn("#### %s ####", dep)
 		//msg.Info("Seen Count: %d", len(r.seen))
 		// Catch the outtermost dependency.
@@ -282,7 +328,28 @@
 	}
 
 	res := make([]string, 0, queue.Len())
+
+	// In addition to generating a list, add any newly found packages to the Config's imports.
 	for e := queue.Front(); e != nil; e = e.Next() {
+		t := strings.TrimPrefix(e.Value.(string), r.VendorDir+string(os.PathSeparator))
+		root, sp := util.NormalizeName(t)
+
+		// TODO(mattfarina): Need to eventually support devImport
+		existing := r.Config.Imports.Get(root)
+		if existing != nil {
+			if sp != "" && !existing.HasSubpackage(sp) {
+				existing.Subpackages = append(existing.Subpackages, sp)
+			}
+		} else {
+			newDep := &cfg.Dependency{
+				Name: root,
+			}
+			if sp != "" {
+				newDep.Subpackages = []string{sp}
+			}
+
+			r.Config.Imports = append(r.Config.Imports, newDep)
+		}
 		res = append(res, e.Value.(string))
 	}
 
@@ -326,6 +393,11 @@
 // If it cannot resolve the pkg, it will return an error.
 func (r *Resolver) imports(pkg string) ([]string, error) {
 
+	if r.Config.HasIgnore(pkg) {
+		msg.Debug("Ignoring %s", pkg)
+		return []string{}, nil
+	}
+
 	// If this pkg is marked seen, we don't scan it again.
 	if _, ok := r.seen[pkg]; ok {
 		msg.Debug("Already saw %s", pkg)
@@ -353,6 +425,10 @@
 	// We are only looking for dependencies in vendor. No root, cgo, etc.
 	buf := []string{}
 	for _, imp := range p.Imports {
+		if r.Config.HasIgnore(imp) {
+			msg.Debug("Ignoring %s", imp)
+			continue
+		}
 		info := r.FindPkg(imp)
 		switch info.Loc {
 		case LocUnknown:
@@ -363,12 +439,14 @@
 			}
 			if found {
 				buf = append(buf, filepath.Join(r.VendorDir, filepath.FromSlash(imp)))
+				r.VersionHandler.SetVersion(imp)
 				continue
 			}
 			r.seen[info.Path] = true
 		case LocVendor:
 			//msg.Debug("Vendored: %s", imp)
 			buf = append(buf, info.Path)
+			r.VersionHandler.SetVersion(imp)
 		case LocGopath:
 			found, err := r.Handler.OnGopath(imp)
 			if err != nil {
@@ -379,6 +457,7 @@
 			// in a less-than-perfect, but functional, situation.
 			if found {
 				buf = append(buf, filepath.Join(r.VendorDir, filepath.FromSlash(imp)))
+				r.VersionHandler.SetVersion(imp)
 				continue
 			}
 			msg.Warn("Package %s is on GOPATH, but not vendored. Ignoring.", imp)
@@ -516,6 +595,13 @@
 	return fi.Mode()&os.ModeSymlink == os.ModeSymlink
 }
 
+// IsSrcDir returns true if this is a directory that could have source code, false otherwise.
+//
+// Directories with _ or . prefixes are skipped, as are testdata and vendor.
+func IsSrcDir(fi os.FileInfo) bool {
+	return srcDir(fi)
+}
+
 func srcDir(fi os.FileInfo) bool {
 	if !fi.IsDir() {
 		return false
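
To illustrate the VersionHandler hook introduced above, here is a minimal sketch with a hypothetical handler that only records which packages the resolver asked about; the real version-setting handler lives in the repo package, and this is meant purely to show the wiring:

package main

import (
	"fmt"

	"github.com/Masterminds/glide/dependency"
)

// recordingVersionHandler is a hypothetical VersionHandler that records the
// packages it is asked to set versions for instead of touching any VCS.
type recordingVersionHandler struct {
	seen []string
}

// Process is called before a package is scanned; nothing to do here.
func (r *recordingVersionHandler) Process(pkg string) error { return nil }

// SetVersion records the package rather than checking out a version.
func (r *recordingVersionHandler) SetVersion(pkg string) error {
	r.seen = append(r.seen, pkg)
	return nil
}

func main() {
	res, err := dependency.NewResolver(".")
	if err != nil {
		fmt.Println(err)
		return
	}

	vh := &recordingVersionHandler{}
	res.VersionHandler = vh

	// Resolve the local project; the handler's hooks fire for whatever packages
	// the resolver decides need version handling (the list may be empty).
	if _, err := res.ResolveLocal(false); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("SetVersion requested for:", vh.seen)
}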
diff --git a/dependency/resolver_test.go b/dependency/resolver_test.go
index 46268f2..0edb764 100644
--- a/dependency/resolver_test.go
+++ b/dependency/resolver_test.go
@@ -21,7 +21,6 @@
 	}
 
 	expect := []string{
-		"github.com/Masterminds/cookoo",
 		"github.com/Masterminds/semver",
 		"github.com/Masterminds/vcs",
 		"gopkg.in/yaml.v2",
@@ -53,8 +52,8 @@
 		t.Fatalf("Failed to resolve: %s", err)
 	}
 
-	if len(l) < 8 {
-		t.Errorf("Expected at least 8 deps, got %d: %s", len(l))
+	if len(l) < 4 {
+		t.Errorf("Expected at least 4 deps, got %d: %s", len(l))
 	}
 }
 
@@ -79,8 +78,8 @@
 	// These are build dependencies of Glide, so we know they are here.
 	deps := []*cfg.Dependency{
 		&cfg.Dependency{Name: "github.com/codegangsta/cli"},
-		&cfg.Dependency{Name: "github.com/Masterminds/cookoo"},
 		&cfg.Dependency{Name: "github.com/Masterminds/semver"},
+		&cfg.Dependency{Name: "github.com/Masterminds/vcs"},
 		&cfg.Dependency{Name: "gopkg.in/yaml.v2"},
 	}
 
diff --git a/gb/gb.go b/gb/gb.go
new file mode 100644
index 0000000..5a07c62
--- /dev/null
+++ b/gb/gb.go
@@ -0,0 +1,69 @@
+package gb
+
+import (
+	"encoding/json"
+	"os"
+	"path/filepath"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+	"github.com/Masterminds/glide/util"
+)
+
+// Has returns true if this dir has a GB-flavored manifest file.
+func Has(dir string) bool {
+	path := filepath.Join(dir, "vendor/manifest")
+	_, err := os.Stat(path)
+	return err == nil
+}
+
+// Parse parses a GB-flavored manifest file.
+func Parse(dir string) ([]*cfg.Dependency, error) {
+	path := filepath.Join(dir, "vendor/manifest")
+	if fi, err := os.Stat(path); err != nil || fi.IsDir() {
+		return []*cfg.Dependency{}, nil
+	}
+
+	msg.Info("Found GB manifest file.\n")
+	buf := []*cfg.Dependency{}
+	file, err := os.Open(path)
+	if err != nil {
+		return buf, err
+	}
+	defer file.Close()
+
+	man := Manifest{}
+
+	dec := json.NewDecoder(file)
+	if err := dec.Decode(&man); err != nil {
+		return buf, err
+	}
+
+	seen := map[string]bool{}
+
+	for _, d := range man.Dependencies {
+		pkg, sub := util.NormalizeName(d.Importpath)
+		if _, ok := seen[pkg]; ok {
+			if len(sub) == 0 {
+				continue
+			}
+			for _, dep := range buf {
+				if dep.Name == pkg {
+					dep.Subpackages = append(dep.Subpackages, sub)
+				}
+			}
+		} else {
+			seen[pkg] = true
+			dep := &cfg.Dependency{
+				Name:       pkg,
+				Reference:  d.Revision,
+				Repository: d.Repository,
+			}
+			if len(sub) > 0 {
+				dep.Subpackages = []string{sub}
+			}
+			buf = append(buf, dep)
+		}
+	}
+	return buf, nil
+}
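
A short usage sketch for the new gb package follows; the project path is an arbitrary example, not something defined by this change:

package main

import (
	"fmt"
	"log"

	"github.com/Masterminds/glide/gb"
)

func main() {
	dir := "/path/to/a/gb/project" // example path only

	// Only parse when a vendor/manifest file is actually present.
	if !gb.Has(dir) {
		fmt.Println("no GB manifest found")
		return
	}

	deps, err := gb.Parse(dir)
	if err != nil {
		log.Fatal(err)
	}
	for _, d := range deps {
		fmt.Printf("%s %s %v\n", d.Name, d.Reference, d.Subpackages)
	}
}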
diff --git a/glide.go b/glide.go
index 0ecd89b..e7900a8 100644
--- a/glide.go
+++ b/glide.go
@@ -39,9 +39,10 @@
 import (
 	"path/filepath"
 
-	"github.com/Masterminds/glide/cmd"
+	"github.com/Masterminds/glide/action"
+	gpath "github.com/Masterminds/glide/path"
+	"github.com/Masterminds/glide/repo"
 
-	"github.com/Masterminds/cookoo"
 	"github.com/codegangsta/cli"
 
 	"fmt"
@@ -66,7 +67,7 @@
 		  subpackages: yaml
 			flatten: true
 
-NOTE: As of Glide 0.5, the commands 'in', 'into', 'gopath', 'status', and 'env'
+NOTE: As of Glide 0.5, the commands 'into', 'gopath', 'status', and 'env'
 no longer exist.
 `
 
@@ -74,11 +75,6 @@
 var VendorDir = "vendor"
 
 func main() {
-	reg, router, cxt := cookoo.Cookoo()
-	cxt.Put("VendorDir", VendorDir)
-
-	routes(reg, cxt)
-
 	app := cli.NewApp()
 	app.Name = "glide"
 	app.Usage = usage
@@ -109,17 +105,16 @@
 		},
 	}
 	app.CommandNotFound = func(c *cli.Context, command string) {
-		cxt.Put("os.Args", os.Args)
-		cxt.Put("command", command)
-		setupHandler(c, "@plugin", cxt, router)
+		// TODO: Set some useful env vars.
+		action.Plugin(command, os.Args)
 	}
-
-	app.Commands = commands(cxt, router)
+	app.Before = startup
+	app.Commands = commands()
 
 	app.Run(os.Args)
 }
 
-func commands(cxt cookoo.Context, router *cookoo.Router) []cli.Command {
+func commands() []cli.Command {
 	return []cli.Command{
 		{
 			Name:      "create",
@@ -139,8 +134,7 @@
 				},
 			},
 			Action: func(c *cli.Context) {
-				cxt.Put("skipImport", c.Bool("skip-import"))
-				setupHandler(c, "create", cxt, router)
+				action.Create(".", c.Bool("skip-import"))
 			},
 		},
 		{
@@ -198,21 +192,52 @@
 					fmt.Println("Oops! Package name is required.")
 					os.Exit(1)
 				}
-				cxt.Put("forceUpdate", c.Bool("force"))
-				cxt.Put("packages", []string(c.Args()))
-				cxt.Put("skipFlatten", !c.Bool("no-recursive"))
-				cxt.Put("insecure", c.Bool("insecure"))
-				cxt.Put("useCache", c.Bool("cache"))
-				cxt.Put("cacheGopath", c.Bool("cache-gopath"))
-				cxt.Put("useGopath", c.Bool("use-gopath"))
-				// FIXME: Are these used anywhere?
-				if c.Bool("import") {
-					cxt.Put("importGodeps", true)
-					cxt.Put("importGPM", true)
-					cxt.Put("importGb", true)
+
+				inst := &repo.Installer{
+					Force:          c.Bool("force"),
+					UseCache:       c.Bool("cache"),
+					UseGopath:      c.Bool("use-gopath"),
+					UseCacheGopath: c.Bool("cache-gopath"),
+					UpdateVendored: c.Bool("update-vendored"),
 				}
-				cxt.Put("updateVendoredDeps", c.Bool("update-vendored"))
-				setupHandler(c, "get", cxt, router)
+				packages := []string(c.Args())
+				insecure := c.Bool("insecure")
+				action.Get(packages, inst, insecure, c.Bool("no-recursive"))
+			},
+		},
+		{
+			Name:      "remove",
+			ShortName: "rm",
+			Usage:     "Remove a package from the glide.yaml file, and regenerate the lock file.",
+			Description: `This takes one or more package names, and removes references from the glide.yaml file.
+	This will rebuild the glide lock file with the following constraints:
+
+	- Dependencies are re-negotiated. Any that are no longer used are left out of the lock.
+	- Minor version re-negotiation is performed on remaining dependencies.
+	- No updates are performed. You may want to run 'glide up' to accomplish that.
+`,
+			Flags: []cli.Flag{
+				cli.BoolFlag{
+					Name:  "delete,d",
+					Usage: "Also delete from vendor/ any packages that are no longer used.",
+				},
+			},
+			Action: func(c *cli.Context) {
+				if len(c.Args()) < 1 {
+					fmt.Println("Oops! At least one package name is required.")
+					os.Exit(1)
+				}
+
+				if c.Bool("delete") {
+					// FIXME: Implement this in the installer.
+					fmt.Println("Delete is not currently implemented.")
+				}
+
+				inst := &repo.Installer{
+					Force: c.Bool("force"),
+				}
+				packages := []string(c.Args())
+				action.Remove(packages, inst)
 			},
 		},
 		{
@@ -229,8 +254,7 @@
 						},
 					},
 					Action: func(c *cli.Context) {
-						cxt.Put("toPath", c.String("file"))
-						setupHandler(c, "import godep", cxt, router)
+						action.ImportGodep(c.String("file"))
 					},
 				},
 				{
@@ -243,8 +267,7 @@
 						},
 					},
 					Action: func(c *cli.Context) {
-						cxt.Put("toPath", c.String("file"))
-						setupHandler(c, "import gpm", cxt, router)
+						action.ImportGPM(c.String("file"))
 					},
 				},
 				{
@@ -257,8 +280,7 @@
 						},
 					},
 					Action: func(c *cli.Context) {
-						cxt.Put("toPath", c.String("file"))
-						setupHandler(c, "import gb", cxt, router)
+						action.ImportGB(c.String("file"))
 					},
 				},
 			},
@@ -268,7 +290,7 @@
 			Usage:       "Print the name of this project.",
 			Description: `Read the glide.yaml file and print the name given on the 'package' line.`,
 			Action: func(c *cli.Context) {
-				setupHandler(c, "name", cxt, router)
+				action.Name()
 			},
 		},
 		{
@@ -281,40 +303,28 @@
 
 			$ go test $(glide novendor)
 `,
+			Flags: []cli.Flag{
+				cli.StringFlag{
+					Name:  "dir,d",
+					Usage: "Specify a directory to run novendor against.",
+					Value: ".",
+				},
+				cli.BoolFlag{
+					Name:  "no-subdir,x",
+					Usage: "Specify this to prevent nv from appending '/...' to all directories.",
+				},
+			},
 			Action: func(c *cli.Context) {
-				setupHandler(c, "nv", cxt, router)
+				action.NoVendor(c.String("dir"), true, !c.Bool("no-subdir"))
 			},
 		},
-		// 	{
-		// 		Name:  "pin",
-		// 		Usage: "Print a YAML file with all of the packages pinned to the current version",
-		// 		Description: `Begins with the current glide.yaml and sets an absolute ref
-		// for every package. The version is derived from the repository version. It will be
-		// either a commit or a tag, depending on the state of the VCS tree.
-		//
-		// By default, output is written to standard out. However, if you supply a filename,
-		// the data will be written to that:
-		//
-		//     $ glide pin glide.yaml
-		//
-		// The above will overwrite your glide.yaml file. You have been warned.
-		// `,
-		// 		Action: func(c *cli.Context) {
-		// 			outfile := ""
-		// 			if len(c.Args()) == 1 {
-		// 				outfile = c.Args()[0]
-		// 			}
-		// 			cxt.Put("toPath", outfile)
-		// 			setupHandler(c, "pin", cxt, router)
-		// 		},
-		// 	},
 		{
 			Name:  "rebuild",
 			Usage: "Rebuild ('go build') the dependencies",
 			Description: `This rebuilds the packages' '.a' files. On some systems
 	this can improve performance on subsequent 'go run' and 'go build' calls.`,
 			Action: func(c *cli.Context) {
-				setupHandler(c, "rebuild", cxt, router)
+				action.Rebuild()
 			},
 		},
 		{
@@ -334,20 +344,16 @@
 					Usage: "Delete vendor packages not specified in config.",
 				},
 				cli.BoolFlag{
-					Name:  "no-recursive, quick",
-					Usage: "Disable updating dependencies' dependencies. Only update things in glide.yaml.",
-				},
-				cli.BoolFlag{
 					Name:  "force",
-					Usage: "If there was a change in the repo or VCS switch to new one. Warning, changes will be lost.",
+					Usage: "If there was a change in the repo or VCS, switch to the new one. Warning: changes will be lost.",
 				},
 				cli.BoolFlag{
 					Name:  "update-vendored, u",
-					Usage: "Update vendored packages (without local VCS repo). Warning, changes will be lost.",
+					Usage: "Update vendored packages (without local VCS repo). Warning: this may destroy local modifications to vendor/.",
 				},
 				cli.StringFlag{
 					Name:  "file, f",
-					Usage: "Save all of the discovered dependencies to a Glide YAML file.",
+					Usage: "Save all of the discovered dependencies to a Glide YAML file. (DEPRECATED: This has no impact.)",
 				},
 				cli.BoolFlag{
 					Name:  "cache",
@@ -363,24 +369,17 @@
 				},
 			},
 			Action: func(c *cli.Context) {
-				cxt.Put("deleteOptIn", c.Bool("delete"))
-				cxt.Put("forceUpdate", c.Bool("force"))
-				cxt.Put("skipFlatten", c.Bool("no-recursive"))
-				cxt.Put("deleteFlatten", c.Bool("delete-flatten"))
-				cxt.Put("toPath", c.String("file"))
-				cxt.Put("toStdout", false)
-				cxt.Put("useCache", c.Bool("cache"))
-				cxt.Put("cacheGopath", c.Bool("cache-gopath"))
-				cxt.Put("useGopath", c.Bool("use-gopath"))
-				if c.Bool("import") {
-					cxt.Put("importGodeps", true)
-					cxt.Put("importGPM", true)
-					cxt.Put("importGb", true)
+				installer := &repo.Installer{
+					DeleteUnused:   c.Bool("delete"),
+					UpdateVendored: c.Bool("update-vendored"),
+					Force:          c.Bool("force"),
+					UseCache:       c.Bool("cache"),
+					UseCacheGopath: c.Bool("cache-gopath"),
+					UseGopath:      c.Bool("use-gopath"),
+					Home:           gpath.Home(),
 				}
-				cxt.Put("updateVendoredDeps", c.Bool("update-vendored"))
 
-				cxt.Put("packages", []string(c.Args()))
-				setupHandler(c, "install", cxt, router)
+				action.Install(installer)
 			},
 		},
 		{
@@ -447,24 +446,17 @@
 				},
 			},
 			Action: func(c *cli.Context) {
-				cxt.Put("deleteOptIn", c.Bool("delete"))
-				cxt.Put("forceUpdate", c.Bool("force"))
-				cxt.Put("skipFlatten", c.Bool("no-recursive"))
-				cxt.Put("deleteFlatten", c.Bool("delete-flatten"))
-				cxt.Put("toPath", c.String("file"))
-				cxt.Put("toStdout", false)
-				cxt.Put("useCache", c.Bool("cache"))
-				cxt.Put("cacheGopath", c.Bool("cache-gopath"))
-				cxt.Put("useGopath", c.Bool("use-gopath"))
-				if c.Bool("import") {
-					cxt.Put("importGodeps", true)
-					cxt.Put("importGPM", true)
-					cxt.Put("importGb", true)
+				installer := &repo.Installer{
+					DeleteUnused:   c.Bool("delete"),
+					UpdateVendored: c.Bool("update-vendored"),
+					Force:          c.Bool("force"),
+					UseCache:       c.Bool("cache"),
+					UseCacheGopath: c.Bool("cache-gopath"),
+					UseGopath:      c.Bool("use-gopath"),
+					Home:           gpath.Home(),
 				}
-				cxt.Put("updateVendoredDeps", c.Bool("update-vendored"))
 
-				cxt.Put("packages", []string(c.Args()))
-				setupHandler(c, "update", cxt, router)
+				action.Update(installer, c.Bool("no-recursive"))
 			},
 		},
 		{
@@ -477,7 +469,7 @@
 	vendor/ are only included if they are referenced by the main project or
 	one of its dependencies.`,
 			Action: func(c *cli.Context) {
-				setupHandler(c, "tree", cxt, router)
+				action.Tree(".", false)
 			},
 		},
 		{
@@ -492,247 +484,19 @@
 			vendor are only included if they are used by the project.
 			`,
 			Action: func(c *cli.Context) {
-				setupHandler(c, "list", cxt, router)
+				action.List(".", true)
 			},
 		},
 		{
 			Name:  "about",
 			Usage: "Learn about Glide",
 			Action: func(c *cli.Context) {
-				setupHandler(c, "about", cxt, router)
+				action.About()
 			},
 		},
 	}
 }
 
-func setupHandler(c *cli.Context, route string, cxt cookoo.Context, router *cookoo.Router) {
-	cxt.Put("q", c.GlobalBool("quiet"))
-	cxt.Put("debug", c.GlobalBool("debug"))
-	cxt.Put("no-color", c.GlobalBool("no-color"))
-	cxt.Put("yaml", c.GlobalString("yaml"))
-	cxt.Put("home", c.GlobalString("home"))
-	cxt.Put("cliArgs", c.Args())
-	if err := router.HandleRequest(route, cxt, false); err != nil {
-		fmt.Printf("Oops! %s\n", err)
-		os.Exit(1)
-	}
-}
-
-func routes(reg *cookoo.Registry, cxt cookoo.Context) {
-	reg.Route("@startup", "Parse args and send to the right subcommand.").
-		// TODO: Add setup for debug in addition to quiet.
-		Does(cmd.BeQuiet, "quiet").
-		Using("quiet").From("cxt:q").
-		Using("debug").From("cxt:debug").
-		Does(cmd.CheckColor, "no-color").
-		Using("no-color").From("cxt:no-color").
-		Does(cmd.VersionGuard, "v")
-
-	reg.Route("@ready", "Prepare for glide commands.").
-		Does(cmd.ReadyToGlide, "ready").Using("filename").From("cxt:yaml").
-		Does(cmd.ParseYaml, "cfg").Using("filename").From("cxt:yaml").
-		Does(cmd.EnsureCacheDir, "_").Using("home").From("cxt:home")
-
-	reg.Route("get", "Install a pkg in vendor, and store the results in the glide.yaml").
-		Includes("@startup").
-		Includes("@ready").
-		Does(cmd.CowardMode, "_").
-		Does(cmd.GetAll, "goget").
-		Using("packages").From("cxt:packages").
-		Using("conf").From("cxt:cfg").
-		Using("insecure").From("cxt:insecure").
-		Does(cmd.VendoredSetup, "cfg").
-		Using("conf").From("cxt:cfg").
-		Using("update").From("cxt:updateVendoredDeps").
-		Does(cmd.UpdateImports, "dependencies").
-		Using("conf").From("cxt:cfg").
-		Using("force").From("cxt:forceUpdate").
-		//Using("packages").From("cxt:packages").
-		Using("home").From("cxt:home").
-		Using("cache").From("cxt:useCache").
-		Using("cacheGopath").From("cxt:cacheGopath").
-		Using("useGopath").From("cxt:useGopath").
-		Does(cmd.SetReference, "version").Using("conf").From("cxt:cfg").
-		Does(cmd.Flatten, "flattened").Using("conf").From("cxt:cfg").
-		//Using("packages").From("cxt:packages").
-		Using("force").From("cxt:forceUpdate").
-		Using("home").From("cxt:home").
-		Using("cache").From("cxt:useCache").
-		Using("cacheGopath").From("cxt:cacheGopath").
-		Using("useGopath").From("cxt:useGopath").
-		Does(cmd.VendoredCleanUp, "_").
-		Using("conf").From("cxt:flattened").
-		Using("update").From("cxt:updateVendoredDeps").
-		Does(cmd.WriteYaml, "out").
-		Using("conf").From("cxt:cfg").
-		Using("filename").WithDefault("glide.yaml").From("cxt:yaml").
-		Does(cmd.WriteLock, "lock").
-		Using("lockfile").From("cxt:Lockfile")
-
-	reg.Route("install", "Install dependencies.").
-		Includes("@startup").
-		Includes("@ready").
-		Does(cmd.CowardMode, "_").
-		Does(cmd.LockFileExists, "_").
-		Does(cmd.LoadLockFile, "lock").
-		Using("conf").From("cxt:cfg").
-		Does(cmd.Mkdir, "dir").Using("dir").WithDefault(VendorDir).
-		Does(cmd.DeleteUnusedPackages, "deleted").
-		Using("conf").From("cxt:cfg").
-		Using("optIn").From("cxt:deleteOptIn").
-		Does(cmd.VendoredSetup, "cfg").
-		Using("conf").From("cxt:cfg").
-		Using("update").From("cxt:updateVendoredDeps").
-		Does(cmd.Install, "icfg").
-		Using("conf").From("cxt:cfg").
-		Using("lock").From("cxt:lock").
-		Using("home").From("cxt:home").
-		Does(cmd.SetReference, "version").Using("conf").From("cxt:icfg").
-		Does(cmd.VendoredCleanUp, "_").
-		Using("conf").From("cxt:icfg").
-		Using("update").From("cxt:updateVendoredDeps")
-
-	reg.Route("update", "Update dependencies.").
-		Includes("@startup").
-		Includes("@ready").
-		Does(cmd.CowardMode, "_").
-		Does(cmd.Mkdir, "dir").Using("dir").WithDefault(VendorDir).
-		Does(cmd.DeleteUnusedPackages, "deleted").
-		Using("conf").From("cxt:cfg").
-		Using("optIn").From("cxt:deleteOptIn").
-		Does(cmd.VendoredSetup, "cfg").
-		Using("conf").From("cxt:cfg").
-		Using("update").From("cxt:updateVendoredDeps").
-		Does(cmd.UpdateImports, "dependencies").
-		Using("conf").From("cxt:cfg").
-		Using("force").From("cxt:forceUpdate").
-		Using("packages").From("cxt:packages").
-		Using("home").From("cxt:home").
-		Using("cache").From("cxt:useCache").
-		Using("cacheGopath").From("cxt:cacheGopath").
-		Using("useGopath").From("cxt:useGopath").
-		Does(cmd.SetReference, "version").Using("conf").From("cxt:cfg").
-		Does(cmd.Flatten, "flattened").Using("conf").From("cxt:cfg").
-		//Using("packages").From("cxt:packages").
-		Using("force").From("cxt:forceUpdate").
-		Using("skip").From("cxt:skipFlatten").
-		Using("home").From("cxt:home").
-		Using("cache").From("cxt:useCache").
-		Using("cacheGopath").From("cxt:cacheGopath").
-		Using("useGopath").From("cxt:useGopath").
-		Does(cmd.VendoredCleanUp, "_").
-		Using("conf").From("cxt:flattened").
-		Using("update").From("cxt:updateVendoredDeps").
-		Does(cmd.WriteYaml, "out").
-		Using("conf").From("cxt:cfg").
-		Using("filename").From("cxt:toPath").
-		Using("toStdout").From("cxt:toStdout").
-		Does(cmd.WriteLock, "lock").
-		Using("lockfile").From("cxt:Lockfile").
-		Using("skip").From("cxt:skipFlatten")
-
-	//Does(cmd.Rebuild, "rebuild").Using("conf").From("cxt:cfg")
-
-	reg.Route("rebuild", "Rebuild dependencies").
-		Includes("@startup").
-		Includes("@ready").
-		Does(cmd.CowardMode, "_").
-		Does(cmd.Rebuild, "rebuild").Using("conf").From("cxt:cfg")
-
-	reg.Route("pin", "Print a YAML file with all of the packages pinned to the current version.").
-		Includes("@startup").
-		Includes("@ready").
-		Does(cmd.Flatten, "flattened").Using("conf").From("cxt:cfg").
-		Using("packages").From("cxt:packages").
-		Using("force").From("cxt:forceUpdate").
-		Using("skip").From("cxt:skipFlatten").
-		Using("home").From("cxt:home").
-		Using("cache").From("cxt:useCache").
-		Using("cacheGopath").From("cxt:cacheGopath").
-		Using("useGopath").From("cxt:useGopath").
-		//Does(cmd.VendoredCleanUp, "_").
-		//Using("conf").From("cxt:flattened").
-		//Using("update").From("cxt:updateVendoredDeps").
-		// Write the Lockfile
-		Does(cmd.WriteYaml, "out").
-		Using("conf").From("cxt:Lockfile").
-		Using("filename").From("cxt:toPath").
-		Using("toStdout").From("cxt:toStdout")
-
-	reg.Route("import gpm", "Read a Godeps file").
-		Includes("@startup").
-		Includes("@ready").
-		Does(cmd.GPMGodeps, "godeps").
-		Does(cmd.AddDependencies, "addGodeps").
-		Using("dependencies").From("cxt:godeps").
-		Using("conf").From("cxt:cfg").
-		Does(cmd.GPMGodepsGit, "godepsGit").
-		Does(cmd.AddDependencies, "addGodepsGit").
-		Using("dependencies").From("cxt:godepsGit").
-		Using("conf").From("cxt:cfg").
-		// Does(cmd.UpdateReferences, "refs").Using("conf").From("cxt:cfg").
-		Does(cmd.WriteYaml, "out").Using("conf").From("cxt:cfg").
-		Using("filename").From("cxt:toPath")
-
-	reg.Route("import godep", "Read a Godeps.json file").
-		Includes("@startup").
-		Includes("@ready").
-		Does(cmd.ParseGodepGodeps, "godeps").
-		Does(cmd.AddDependencies, "addGodeps").
-		Using("dependencies").From("cxt:godeps").
-		Using("conf").From("cxt:cfg").
-		// Does(cmd.UpdateReferences, "refs").Using("conf").From("cxt:cfg").
-		Does(cmd.WriteYaml, "out").Using("conf").From("cxt:cfg").
-		Using("filename").From("cxt:toPath")
-
-	reg.Route("import gb", "Read a vendor/manifest file").
-		Includes("@startup").
-		Includes("@ready").
-		Does(cmd.GbManifest, "manifest").
-		Does(cmd.AddDependencies, "addGodeps").
-		Using("dependencies").From("cxt:manifest").
-		Using("conf").From("cxt:cfg").
-		Does(cmd.WriteYaml, "out").Using("conf").From("cxt:cfg").
-		Using("filename").From("cxt:toPath")
-
-	reg.Route("create", "Guess dependencies").
-		Includes("@startup").
-		Does(cmd.GuardYaml, "_").
-		Using("filename").From("cxt:yaml").
-		Does(cmd.GuessDeps, "cfg").
-		Using("skipImport").From("cxt:skipImport").
-		Does(cmd.WriteYaml, "out").
-		Using("conf").From("cxt:cfg").
-		Using("filename").From("cxt:yaml")
-
-	reg.Route("name", "Print environment").
-		Includes("@startup").
-		Includes("@ready").
-		Does(cmd.PrintName, "status").
-		Using("conf").From("cxt:cfg")
-
-	reg.Route("tree", "Print a dependency graph.").
-		Includes("@startup").
-		Does(cmd.Tree, "tree")
-	reg.Route("list", "Print a dependency graph.").
-		Includes("@startup").
-		Does(cmd.ListDeps, "list")
-
-	reg.Route("nv", "No Vendor").
-		Includes("@startup").
-		Does(cmd.NoVendor, "paths").
-		Does(cmd.PathString, "out").Using("paths").From("cxt:paths")
-
-	reg.Route("about", "Status").
-		Includes("@startup").
-		Does(cmd.About, "about")
-
-	reg.Route("@plugin", "Try to send to a plugin.").
-		Includes("@ready").
-		Does(cmd.DropToShell, "plugin").
-		Using("command").From("cxt:command")
-}
-
 func defaultGlideDir() string {
 	c, err := user.Current()
 	if err != nil {
@@ -740,3 +504,35 @@
 	}
 	return filepath.Join(c.HomeDir, ".glide")
 }
+
+// startup sets up the base environment.
+//
+// It does not assume the presence of a Glide.yaml file or vendor/ directory,
+// so it can be used by any Glide command.
+func startup(c *cli.Context) error {
+	action.Debug(c.Bool("debug"))
+	action.NoColor(c.Bool("no-color"))
+	action.Quiet(c.Bool("quiet"))
+	action.Init(c.String("yaml"), c.String("home"))
+	action.EnsureGoVendor()
+	return nil
+}
+
+// glidefile gets the path to the glide.yaml file.
+//
+// This returns the name of the path, even if the file does not exist. The value
+// may be set by the user, or it may be the default.
+func glidefile(c *cli.Context) string {
+	path := c.String("file")
+	if path == "" {
+		// For now, we construct a basic assumption. In the future, we could
+		// traverse backward to see if a glide.yaml exists in a parent.
+		path = "./glide.yaml"
+	}
+	a, err := filepath.Abs(path)
+	if err != nil {
+		// Underlying fs didn't provide working dir.
+		return path
+	}
+	return a
+}
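
As a hedged sketch (not part of this diff), startup has the signature of codegangsta/cli's Before hook, so it could be wired into the CLI roughly as below; the app setup and the single flag shown are illustrative assumptions.

package main

import (
	"os"

	"github.com/codegangsta/cli"
)

// Hypothetical wiring sketch: startup matches cli's Before signature, so it
// can run before every command. Only the "debug" flag is declared here as an
// illustration; the real app declares the full flag set startup reads.
func exampleWiring() {
	app := cli.NewApp()
	app.Name = "glide"
	app.Before = startup
	app.Flags = []cli.Flag{
		cli.BoolFlag{Name: "debug", Usage: "Print debug verbosity."},
	}
	app.Run(os.Args)
}
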
diff --git a/glide.lock b/glide.lock
index cd88888..3ff76ad 100644
--- a/glide.lock
+++ b/glide.lock
@@ -1,14 +1,10 @@
-hash: foo
-updated: 2015-12-21T09:29:33.170992254-05:00
+hash: 4cf59f8e61ae7034d3296c0c7528aaf8784800008814fb02410cbb3ea9b34175
+updated: 2016-01-21T11:19:37.465408253-05:00
 imports:
 - name: github.com/codegangsta/cli
-  version: b5232bb2934f606f9f27a1305f1eea224e8e8b88
-- name: github.com/Masterminds/cookoo
-  version: 78aa11ce75e257c51be7ea945edb84cf19c4a6de
-  subpackages:
-  - .
+  version: c31a7975863e7810c92e2e288a9ab074f9a88f29
 - name: github.com/Masterminds/semver
-  version: 6333b7bd29aad1d79898ff568fd90a8aa533ae82
+  version: 513f3dcb3ecfb1248831fb5cb06a23a3cd5935dc
 - name: github.com/Masterminds/vcs
   version: eaee272c8fa4514e1572e182faecff5be20e792a
 - name: gopkg.in/yaml.v2
diff --git a/glide.yaml b/glide.yaml
index c888aa0..611d71c 100644
--- a/glide.yaml
+++ b/glide.yaml
@@ -1,10 +1,6 @@
 package: github.com/Masterminds/glide
 import:
 - package: gopkg.in/yaml.v2
-- package: github.com/Masterminds/cookoo
-  version: ^1.2.0
-  subpackages:
-  - .
 - package: github.com/Masterminds/vcs
   version: ^1.2.0
 - package: github.com/codegangsta/cli
diff --git a/glide_test.go b/glide_test.go
index 9e0a838..1c50468 100644
--- a/glide_test.go
+++ b/glide_test.go
@@ -2,13 +2,10 @@
 
 import (
 	"testing"
-
-	"github.com/Masterminds/cookoo"
 )
 
 func TestCommandsNonEmpty(t *testing.T) {
-	_, router, ctx := cookoo.Cookoo()
-	commands := commands(ctx, router)
+	commands := commands()
 	if len(commands) == 0 {
 		t.Fail()
 	}
diff --git a/cmd/godeps.go b/godep/godep.go
similarity index 65%
rename from cmd/godeps.go
rename to godep/godep.go
index ac12292..b39deca 100644
--- a/cmd/godeps.go
+++ b/godep/godep.go
@@ -1,4 +1,8 @@
-package cmd
+/* Package godep provides basic importing of Godep dependencies.
+
+This is not a complete implementation of Godep.
+*/
+package godep
 
 import (
 	"encoding/json"
@@ -6,8 +10,8 @@
 	"path/filepath"
 	"strings"
 
-	"github.com/Masterminds/cookoo"
 	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
 	"github.com/Masterminds/glide/util"
 )
 
@@ -35,34 +39,26 @@
 	Rev        string // VCS-specific commit ID.
 }
 
-// HasGodepGodeps is a command to detect if a package contains a Godeps.json file.
-func HasGodepGodeps(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	dir := cookoo.GetString("dir", "", p)
-	path := filepath.Join(dir, "Godeps", "Godeps.json")
+// Has detects whether a package contains a Godeps.json file.
+func Has(dir string) bool {
+	path := filepath.Join(dir, "Godeps/Godeps.json")
 	_, err := os.Stat(path)
-	return err == nil, nil
+	return err == nil
 }
 
-// ParseGodepGodeps parses the Godep Godeps.json file.
+// Parse parses a Godep's Godeps file.
 //
-// Params:
-// - dir (string): the project's directory
-//
-// Returns an []*cfg.Dependency
-func ParseGodepGodeps(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
-	dir := cookoo.GetString("dir", "", p)
-	return parseGodepGodeps(dir)
-}
-func parseGodepGodeps(dir string) ([]*cfg.Dependency, error) {
-	path := filepath.Join(dir, "Godeps", "Godeps.json")
+// It returns the contents as a dependency array.
+func Parse(dir string) ([]*cfg.Dependency, error) {
+	path := filepath.Join(dir, "Godeps/Godeps.json")
 	if _, err := os.Stat(path); err != nil {
 		return []*cfg.Dependency{}, nil
 	}
-	Info("Found Godeps.json file.\n")
+	msg.Info("Found Godeps.json file.\n")
 
 	buf := []*cfg.Dependency{}
 
-	godeps := new(Godeps)
+	godeps := &Godeps{}
 
 	// Get a handle to the file.
 	file, err := os.Open(path)
@@ -76,11 +72,8 @@
 		return buf, err
 	}
 
-	// Info("Importing %d packages from %s.\n", len(godeps.Deps), godeps.ImportPath)
 	seen := map[string]bool{}
-
 	for _, d := range godeps.Deps {
-		// Info("Adding package %s\n", d.ImportPath)
 		pkg := util.GetRootFromPackage(d.ImportPath)
 		sub := strings.TrimPrefix(d.ImportPath, pkg)
 		sub = strings.TrimPrefix(sub, "/")
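
A minimal usage sketch for the new godep package, not part of this diff; the project directory is an illustrative assumption.

package main

import (
	"fmt"

	"github.com/Masterminds/glide/cfg"
	"github.com/Masterminds/glide/godep"
)

// Sketch: detect and parse a Godeps/Godeps.json file in a project directory.
// The "./someproject" path is an assumption for illustration.
func exampleGodep() ([]*cfg.Dependency, error) {
	dir := "./someproject"
	if !godep.Has(dir) {
		return nil, fmt.Errorf("no Godeps.json found in %s", dir)
	}
	return godep.Parse(dir)
}
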
diff --git a/gpm/gpm.go b/gpm/gpm.go
new file mode 100644
index 0000000..41de69d
--- /dev/null
+++ b/gpm/gpm.go
@@ -0,0 +1,68 @@
+/* Package gpm reads GPM's Godeps files.
+
+It is not a complete implementation of GPM.
+*/
+package gpm
+
+import (
+	"bufio"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+)
+
+// Has indicates whether a Godeps file exists.
+func Has(dir string) bool {
+	path := filepath.Join(dir, "Godeps")
+	_, err := os.Stat(path)
+	return err == nil
+}
+
+// Parse parses a GPM-flavored Godeps file.
+func Parse(dir string) ([]*cfg.Dependency, error) {
+	path := filepath.Join(dir, "Godeps")
+	if i, err := os.Stat(path); err != nil {
+		return []*cfg.Dependency{}, nil
+	} else if i.IsDir() {
+		msg.Info("Godeps is a directory. This is probably a Godep project.\n")
+		return []*cfg.Dependency{}, nil
+	}
+	msg.Info("Found Godeps file.\n")
+
+	buf := []*cfg.Dependency{}
+
+	file, err := os.Open(path)
+	if err != nil {
+		return buf, err
+	}
+	scanner := bufio.NewScanner(file)
+	for scanner.Scan() {
+		parts, ok := parseGodepsLine(scanner.Text())
+		if ok {
+			dep := &cfg.Dependency{Name: parts[0]}
+			if len(parts) > 1 {
+				dep.Reference = parts[1]
+			}
+			buf = append(buf, dep)
+		}
+	}
+	if err := scanner.Err(); err != nil {
+		msg.Warn("Scan failed: %s\n", err)
+		return buf, err
+	}
+
+	return buf, nil
+}
+
+func parseGodepsLine(line string) ([]string, bool) {
+	line = strings.TrimSpace(line)
+
+	if len(line) == 0 || strings.HasPrefix(line, "#") {
+		return []string{}, false
+	}
+
+	return strings.Fields(line), true
+}
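
A similar hedged sketch for the GPM reader introduced above; the directory is an illustrative assumption.

package main

import (
	"fmt"

	"github.com/Masterminds/glide/gpm"
)

// Sketch: parse a GPM-style Godeps file and print each dependency with its
// pinned reference. The "./someproject" path is an assumption.
func exampleGPM() error {
	deps, err := gpm.Parse("./someproject")
	if err != nil {
		return err
	}
	for _, d := range deps {
		fmt.Printf("%s %s\n", d.Name, d.Reference)
	}
	return nil
}
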
diff --git a/importer/importer.go b/importer/importer.go
new file mode 100644
index 0000000..51a9ba5
--- /dev/null
+++ b/importer/importer.go
@@ -0,0 +1,82 @@
+// Package importer imports dependency configuration from Glide, Godep, GPM, and GB
+package importer
+
+import (
+	"io/ioutil"
+	"os"
+	"path/filepath"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/gb"
+	"github.com/Masterminds/glide/godep"
+	"github.com/Masterminds/glide/gpm"
+)
+
+var i = &DefaultImporter{}
+
+// Import uses the DefaultImporter to import from Glide, Godep, GPM, and GB.
+func Import(path string) (bool, []*cfg.Dependency, error) {
+	return i.Import(path)
+}
+
+// Importer enables importing dependency configuration.
+type Importer interface {
+
+	// Import imports dependency configuration. It returns:
+	// - A bool indicating whether any configuration was found.
+	// - []*cfg.Dependency containing dependency configuration if any is found.
+	// - An error if one was reported.
+	Import(path string) (bool, []*cfg.Dependency, error)
+}
+
+// DefaultImporter imports from Glide, Godep, GPM, and GB.
+type DefaultImporter struct{}
+
+// Import tries to import configuration from Glide, Godep, GPM, and GB.
+func (d *DefaultImporter) Import(path string) (bool, []*cfg.Dependency, error) {
+
+	// Try importing from Glide first.
+	p := filepath.Join(path, "glide.yaml")
+	if _, err := os.Stat(p); err == nil {
+		// We found glide configuration.
+		yml, err := ioutil.ReadFile(p)
+		if err != nil {
+			return false, []*cfg.Dependency{}, err
+		}
+		conf, err := cfg.ConfigFromYaml(yml)
+		if err != nil {
+			return false, []*cfg.Dependency{}, err
+		}
+		return true, conf.Imports, nil
+	}
+
+	// Try importing from Godep
+	if godep.Has(path) {
+		deps, err := godep.Parse(path)
+		if err != nil {
+			return false, []*cfg.Dependency{}, err
+		}
+		return true, deps, nil
+	}
+
+	// Try importing from GPM
+	if gpm.Has(path) {
+		deps, err := gpm.Parse(path)
+		if err != nil {
+			return false, []*cfg.Dependency{}, err
+		}
+		return true, deps, nil
+	}
+
+	// Try importing from GB
+	if gb.Has(path) {
+		deps, err := gb.Parse(path)
+		if err != nil {
+			return false, []*cfg.Dependency{}, err
+		}
+		return true, deps, nil
+	}
+
+	// When none are found.
+	return false, []*cfg.Dependency{}, nil
+}
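
A short hedged sketch of the importer facade in use; the path is an illustrative assumption.

package main

import (
	"fmt"

	"github.com/Masterminds/glide/importer"
)

// Sketch: try Glide, Godep, GPM, and GB configuration in a single call via
// the package-level Import helper.
func exampleImport() {
	found, deps, err := importer.Import("./someproject")
	if err != nil {
		fmt.Println("import failed:", err)
		return
	}
	if !found {
		fmt.Println("no dependency configuration found")
		return
	}
	fmt.Printf("imported %d dependencies\n", len(deps))
}
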
diff --git a/msg/msg.go b/msg/msg.go
index a357a39..880afb5 100644
--- a/msg/msg.go
+++ b/msg/msg.go
@@ -1,92 +1,214 @@
-// +build !windows
-
 package msg
 
 import (
 	"fmt"
+	"io"
+	"os"
 	"strings"
+	"sync"
 )
 
-// These contanstants map to color codes for shell scripts making them
-// human readable.
-const (
-	Blue   = "0;34"
-	Red    = "0;31"
-	Green  = "0;32"
-	Yellow = "0;33"
-	Cyan   = "0;36"
-	Pink   = "1;35"
-)
+// Messanger provides the underlying implementation that displays output to
+// users.
+type Messanger struct {
+	sync.Mutex
 
-// Color returns a string in a certain color. The first argument is a string
-// containing the color code or a constant from the table above mapped to a code.
-//
-// The following will print the string "Foo" in yellow:
-//     fmt.Print(Color(Yellow, "Foo"))
-func Color(code, msg string) string {
-	if NoColor {
-		return msg
-	}
-	return fmt.Sprintf("\033[%sm%s\033[m", code, msg)
+	// Quiet, if true, suppresses chatty levels, like Info.
+	Quiet bool
+
+	// IsDebugging, if true, shows verbose levels, like Debug.
+	IsDebugging bool
+
+	// NoColor, if true, will not use color in the output.
+	NoColor bool
+
+	// Stdout is the location where this prints output.
+	Stdout io.Writer
+
+	// Stderr is the location where this prints logs.
+	Stderr io.Writer
+
+	// PanicOnDie if true Die() will panic instead of exiting.
+	PanicOnDie bool
+
+	// The default exit code to use when dying
+	ecode int
 }
 
+// NewMessanger creates a default Messanger to display output.
+func NewMessanger() *Messanger {
+	m := &Messanger{
+		Quiet:       false,
+		IsDebugging: false,
+		NoColor:     false,
+		Stdout:      os.Stdout,
+		Stderr:      os.Stderr,
+		PanicOnDie:  false,
+		ecode:       1,
+	}
+
+	return m
+}
+
+// Default contains a default Messanger used by package-level functions
+var Default = NewMessanger()
+
 // Info logs information
-func Info(msg string, args ...interface{}) {
-	if Quiet {
+func (m *Messanger) Info(msg string, args ...interface{}) {
+	if m.Quiet {
 		return
 	}
-	fmt.Fprint(Stderr, Color(Green, "[INFO] "))
-	Msg(msg, args...)
+	prefix := m.Color(Green, "[INFO] ")
+	m.Msg(prefix+msg, args...)
+}
+
+// Info logs information using the Default Messanger
+func Info(msg string, args ...interface{}) {
+	Default.Info(msg, args...)
 }
 
 // Debug logs debug information
-func Debug(msg string, args ...interface{}) {
-	if Quiet || !IsDebugging {
+func (m *Messanger) Debug(msg string, args ...interface{}) {
+	if m.Quiet || !m.IsDebugging {
 		return
 	}
-	fmt.Fprint(Stderr, "[DEBUG] ")
-	Msg(msg, args...)
+	prefix := "[DEBUG] "
+	m.Msg(prefix+msg, args...)
+}
+
+// Debug logs debug information using the Default Messanger
+func Debug(msg string, args ...interface{}) {
+	Default.Debug(msg, args...)
 }
 
 // Warn logs a warning
+func (m *Messanger) Warn(msg string, args ...interface{}) {
+	prefix := m.Color(Yellow, "[WARN] ")
+	m.Msg(prefix+msg, args...)
+}
+
+// Warn logs a warning using the Default Messanger
 func Warn(msg string, args ...interface{}) {
-	fmt.Fprint(Stderr, Color(Yellow, "[WARN] "))
-	ErrMsg(msg, args...)
+	Default.Warn(msg, args...)
 }
 
 // Error logs and error.
-func Error(msg string, args ...interface{}) {
-	fmt.Fprint(Stderr, Color(Red, "[ERROR] "))
-	ErrMsg(msg, args...)
+func (m *Messanger) Error(msg string, args ...interface{}) {
+	prefix := m.Color(Red, "[ERROR] ")
+	m.Msg(prefix+msg, args...)
 }
 
-// ErrMsg sends a message to Stderr
-func ErrMsg(msg string, args ...interface{}) {
-	if len(args) == 0 {
-		fmt.Fprint(Stderr, msg)
-	} else {
-		fmt.Fprintf(Stderr, msg, args...)
-	}
+// Error logs an error using the Default Messanger
+func Error(msg string, args ...interface{}) {
+	Default.Error(msg, args...)
+}
 
-	// Get rid of the annoying fact that messages need \n at the end, but do
-	// it in a backward compatible way.
-	if !strings.HasSuffix(msg, "\n") {
-		fmt.Fprintln(Stderr)
+// Die prints an error message and immediately exits the application.
+// If PanicOnDie is set to true a panic will occur instead of os.Exit being
+// called.
+func (m *Messanger) Die(msg string, args ...interface{}) {
+	m.Error(msg, args...)
+	if m.PanicOnDie {
+		panic("trapped a Die() call")
 	}
+	os.Exit(m.ecode)
+}
+
+// Die prints an error message and immediately exits the application using the
+// Default Messanger. If PanicOnDie is set to true a panic will occur instead of
+// os.Exit being called.
+func Die(msg string, args ...interface{}) {
+	Default.Die(msg, args...)
+}
+
+// ExitCode sets the exit code used by Die.
+//
+// The default is 1.
+//
+// Returns the old error code.
+func (m *Messanger) ExitCode(e int) int {
+	m.Lock()
+	old := m.ecode
+	m.ecode = e
+	m.Unlock()
+	return old
+}
+
+// ExitCode sets the exit code used by Die using the Default Messanger.
+//
+// The default is 1.
+//
+// Returns the old error code.
+func ExitCode(e int) int {
+	return Default.ExitCode(e)
 }
 
 // Msg prints a message with optional arguments, that can be printed, of
 // varying types.
-func Msg(msg string, args ...interface{}) {
-	if len(args) == 0 {
-		fmt.Fprint(Stderr, msg)
-	} else {
-		fmt.Fprintf(Stderr, msg, args...)
-	}
+func (m *Messanger) Msg(msg string, args ...interface{}) {
+	// When operations in Glide are happening concurrently messaging needs to be
+	// locked to avoid displaying one message in the middle of another one.
+	m.Lock()
+	defer m.Unlock()
 
 	// Get rid of the annoying fact that messages need \n at the end, but do
 	// it in a backward compatible way.
 	if !strings.HasSuffix(msg, "\n") {
-		fmt.Fprintln(Stderr)
+		msg += "\n"
 	}
+
+	if len(args) == 0 {
+		fmt.Fprint(m.Stderr, msg)
+	} else {
+		fmt.Fprintf(m.Stderr, msg, args...)
+	}
+}
+
+// Msg prints a message, with optional arguments of varying types, using the
+// Default Messanger.
+func Msg(msg string, args ...interface{}) {
+	Default.Msg(msg, args...)
+}
+
+// Puts formats a message and then prints to Stdout.
+//
+// It does not prefix the message, color it, or otherwise decorate it.
+//
+// It does add a line feed.
+func (m *Messanger) Puts(msg string, args ...interface{}) {
+	// When operations in Glide are happening concurrently messaging needs to be
+	// locked to avoid displaying one message in the middle of another one.
+	m.Lock()
+	defer m.Unlock()
+
+	fmt.Fprintf(m.Stdout, msg, args...)
+	fmt.Fprintln(m.Stdout)
+}
+
+// Puts formats a message and then prints to Stdout using the Default Messanger.
+//
+// It does not prefix the message, color it, or otherwise decorate it.
+//
+// It does add a line feed.
+func Puts(msg string, args ...interface{}) {
+	Default.Puts(msg, args...)
+}
+
+// Print prints exactly the string given.
+//
+// It prints to Stdout.
+func (m *Messanger) Print(msg string) {
+	// When operations in Glide are happening concurrently messaging needs to be
+	// locked to avoid displaying one message in the middle of another one.
+	m.Lock()
+	defer m.Unlock()
+
+	fmt.Fprint(m.Stdout, msg)
+}
+
+// Print prints exactly the string given using the Default Messanger.
+//
+// It prints to Stdout.
+func Print(msg string) {
+	Default.Print(msg)
 }
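
A hedged sketch of how the reworked msg package might be used, for instance to capture output under test; nothing here is part of this diff.

package main

import (
	"bytes"

	"github.com/Masterminds/glide/msg"
)

// Sketch: the package-level helpers delegate to msg.Default, whose writers
// and behavior can be adjusted before use.
func exampleMsg() string {
	var out bytes.Buffer
	msg.Default.Stdout = &out     // capture Puts/Print output
	msg.Default.PanicOnDie = true // make Die recoverable via recover()
	msg.Default.Quiet = false

	msg.Info("resolving imports")     // prefixed, written to Stderr
	msg.Puts("glide version %s", "x") // plain, written to the captured buffer
	return out.String()
}
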
diff --git a/msg/msg_windows.go b/msg/msg_windows.go
deleted file mode 100644
index 7a715f1..0000000
--- a/msg/msg_windows.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// +build windows
-
-package msg
-
-import (
-	"fmt"
-	"strings"
-)
-
-// Info logs information
-func Info(msg string, args ...interface{}) {
-	if Quiet {
-		return
-	}
-	fmt.Print("[INFO] ")
-	Msg(msg, args...)
-}
-
-// Debug logs debug information
-func Debug(msg string, args ...interface{}) {
-	if Quiet || !IsDebugging {
-		return
-	}
-	fmt.Print("[DEBUG] ")
-	Msg(msg, args...)
-}
-
-// Warn logs a warning
-func Warn(msg string, args ...interface{}) {
-	fmt.Fprint(Stderr, "[WARN] ")
-	ErrMsg(msg, args...)
-}
-
-// Error logs and error.
-func Error(msg string, args ...interface{}) {
-	fmt.Fprint(Stderr, "[ERROR] ")
-	ErrMsg(msg, args...)
-}
-
-// ErrMsg sends a message to Stderr
-func ErrMsg(msg string, args ...interface{}) {
-	if len(args) == 0 {
-		fmt.Fprint(Stderr, msg)
-		return
-	}
-	fmt.Fprintf(Stderr, msg, args...)
-}
-
-// Msg prints a message with optional arguments, that can be printed, of
-// varying types.
-func Msg(msg string, args ...interface{}) {
-	if len(args) == 0 {
-		fmt.Print(msg)
-		return
-	}
-	fmt.Printf(msg, args...)
-
-	// Get rid of the annoying fact that messages need \n at the end, but do
-	// it in a backward compatible way.
-	if !strings.HasSuffix(msg, "\n") {
-		fmt.Println("")
-	}
-}
diff --git a/msg/out.go b/msg/out.go
new file mode 100644
index 0000000..8019d21
--- /dev/null
+++ b/msg/out.go
@@ -0,0 +1,28 @@
+// +build !windows
+
+package msg
+
+import "fmt"
+
+// These constants map to color codes for shell scripts, making them
+// human readable.
+const (
+	Blue   = "0;34"
+	Red    = "0;31"
+	Green  = "0;32"
+	Yellow = "0;33"
+	Cyan   = "0;36"
+	Pink   = "1;35"
+)
+
+// Color returns a string in a certain color. The first argument is a string
+// containing the color code or a constant from the table above mapped to a code.
+//
+// The following will print the string "Foo" in yellow:
+//     fmt.Print(Color(Yellow, "Foo"))
+func (m *Messanger) Color(code, msg string) string {
+	if m.NoColor {
+		return msg
+	}
+	return fmt.Sprintf("\033[%sm%s\033[m", code, msg)
+}
diff --git a/msg/out_windows.go b/msg/out_windows.go
new file mode 100644
index 0000000..fa71e2b
--- /dev/null
+++ b/msg/out_windows.go
@@ -0,0 +1,21 @@
+// +build windows
+
+package msg
+
+// The color codes here are for compatibility with how Colors are used. Windows
+// colors have not been implemented yet. See https://github.com/Masterminds/glide/issues/158
+// for more detail.
+const (
+	Blue   = ""
+	Red    = ""
+	Green  = ""
+	Yellow = ""
+	Cyan   = ""
+	Pink   = ""
+)
+
+// Color on Windows returns no color. See
+// https://github.com/Masterminds/glide/issues/158 if you want to help.
+func (m *Messanger) Color(code, msg string) string {
+	return msg
+}
diff --git a/msg/vars.go b/msg/vars.go
deleted file mode 100644
index 129b53c..0000000
--- a/msg/vars.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package msg
-
-import (
-	"fmt"
-	"os"
-)
-
-// Quiet, if true, suppresses chatty levels, like Info.
-var Quiet = false
-
-// IsDebugging, if true, shows verbose levels, like Debug.
-var IsDebugging = false
-
-// NoColor, if true, will not use color in the output.
-var NoColor = false
-
-// Stdout is the location where this prints output.
-var Stdout = os.Stdout
-
-// Stderr is the location where this prints logs.
-var Stderr = os.Stderr
-
-// Puts formats a message and then prints to Stdout.
-//
-// It does not prefix the message, does not color it, or otherwise decorate it.
-//
-// It does add a line feed.
-func Puts(msg string, args ...interface{}) {
-	fmt.Fprintf(Stdout, msg, args...)
-	fmt.Fprintln(Stdout)
-}
diff --git a/path/path.go b/path/path.go
new file mode 100644
index 0000000..466cb6d
--- /dev/null
+++ b/path/path.go
@@ -0,0 +1,249 @@
+/* Package path contains path and environment utilities for Glide.
+
+This includes tools to find and manipulate Go path variables, as well as
+tools for copying from one path to another.
+*/
+package path
+
+import (
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+	"strings"
+)
+
+const DefaultGlideFile = "glide.yaml"
+
+// VendorDir is the name of the directory that holds vendored dependencies.
+//
+// As of Go 1.5, this is always vendor.
+var VendorDir = "vendor"
+
+// HomeDir is the home directory for Glide.
+//
+// HomeDir is where cache files and other configuration data are stored.
+var HomeDir = "$HOME/.glide"
+
+// GlideFile is the name of the Glide file.
+//
+// Setting this is not concurrency safe. For consistency, it should really
+// only be set once, at startup, or not at all.
+var GlideFile = DefaultGlideFile
+
+const LockFile = "glide.lock"
+
+// Home returns the Glide home directory ($GLIDE_HOME or ~/.glide, typically).
+//
+// This normalizes to an absolute path, and passes through os.ExpandEnv.
+func Home() string {
+	h := os.ExpandEnv(HomeDir)
+	var err error
+	if h, err = filepath.Abs(h); err != nil {
+		return HomeDir
+	}
+	return h
+}
+
+// Vendor calculates the path to the vendor directory.
+//
+// Based on working directory, VendorDir and GlideFile, this attempts to
+// guess the location of the vendor directory.
+func Vendor() (string, error) {
+	cwd, err := os.Getwd()
+	if err != nil {
+		return "", err
+	}
+
+	// Find the directory that contains glide.yaml
+	yamldir, err := GlideWD(cwd)
+	if err != nil {
+		return cwd, err
+	}
+
+	gopath := filepath.Join(yamldir, VendorDir)
+
+	return gopath, nil
+}
+
+// Glide gets the path to the closest glide file.
+func Glide() (string, error) {
+	cwd, err := os.Getwd()
+	if err != nil {
+		return "", err
+	}
+
+	// Find the directory that contains glide.yaml
+	yamldir, err := GlideWD(cwd)
+	if err != nil {
+		return cwd, err
+	}
+
+	gf := filepath.Join(yamldir, GlideFile)
+	return gf, nil
+}
+
+// GlideWD finds the working directory of the glide.yaml file, starting at dir.
+//
+// If the glide file is not found in the current directory, it recurses up
+// a directory.
+func GlideWD(dir string) (string, error) {
+	fullpath := filepath.Join(dir, GlideFile)
+
+	if _, err := os.Stat(fullpath); err == nil {
+		return dir, nil
+	}
+
+	base := filepath.Dir(dir)
+	if base == dir {
+		return "", fmt.Errorf("Cannot resolve parent of %s", base)
+	}
+
+	return GlideWD(base)
+}
+
+// Gopath gets GOPATH from the environment and returns the most relevant path.
+//
+// A GOPATH can contain a colon-separated list of paths. This retrieves the
+// GOPATH and returns only the FIRST ("most relevant") path.
+//
+// This should be used carefully. If, for example, you are looking for a package,
+// you may be better off using Gopaths.
+func Gopath() string {
+	gopaths := Gopaths()
+	if len(gopaths) == 0 {
+		return ""
+	}
+	return gopaths[0]
+}
+
+// Gopaths retrieves the GOPATH as a list of paths, splitting it on the list
+// separator when more than one path is present.
+func Gopaths() []string {
+	p := os.Getenv("GOPATH")
+	p = strings.Trim(p, string(filepath.ListSeparator))
+	return filepath.SplitList(p)
+}
+
+// Basepath returns the current working directory.
+//
+// If there is an error getting the working directory, this returns ".", which
+// should function in cases where the directory is unlinked... Then again,
+// maybe not.
+func Basepath() string {
+	base, err := os.Getwd()
+	if err != nil {
+		return "."
+	}
+	return base
+}
+
+// IsLink returns true if the given FileInfo references a link.
+func IsLink(fi os.FileInfo) bool {
+	return fi.Mode()&os.ModeSymlink == os.ModeSymlink
+}
+
+// HasLock returns true if this can stat a lockfile at the given location.
+func HasLock(basepath string) bool {
+	_, err := os.Stat(filepath.Join(basepath, LockFile))
+	return err == nil
+}
+
+// IsDirectoryEmpty checks if a directory is empty.
+func IsDirectoryEmpty(dir string) (bool, error) {
+	f, err := os.Open(dir)
+	if err != nil {
+		return false, err
+	}
+	defer f.Close()
+
+	_, err = f.Readdir(1)
+
+	if err == io.EOF {
+		return true, nil
+	}
+
+	return false, err
+}
+
+// CopyDir copies an entire source directory to the dest directory.
+//
+// This is akin to `cp -a src/* dest/`
+//
+// We copy the directory here rather than jumping out to a shell so we can
+// support multiple operating systems.
+func CopyDir(source string, dest string) error {
+
+	// get properties of source dir
+	si, err := os.Stat(source)
+	if err != nil {
+		return err
+	}
+
+	err = os.MkdirAll(dest, si.Mode())
+	if err != nil {
+		return err
+	}
+
+	d, _ := os.Open(source)
+
+	objects, err := d.Readdir(-1)
+
+	for _, obj := range objects {
+
+		sp := filepath.Join(source, "/", obj.Name())
+
+		dp := filepath.Join(dest, "/", obj.Name())
+
+		if obj.IsDir() {
+			err = CopyDir(sp, dp)
+			if err != nil {
+				return err
+			}
+		} else {
+			// perform copy
+			err = CopyFile(sp, dp)
+			if err != nil {
+				return err
+			}
+		}
+
+	}
+	return nil
+}
+
+// CopyFile copies a source file to a destination.
+//
+// It follows symbolic links and retains modes.
+func CopyFile(source string, dest string) error {
+	ln, err := os.Readlink(source)
+	if err == nil {
+		return os.Symlink(ln, dest)
+	}
+	s, err := os.Open(source)
+	if err != nil {
+		return err
+	}
+
+	defer s.Close()
+
+	d, err := os.Create(dest)
+	if err != nil {
+		return err
+	}
+
+	defer d.Close()
+
+	_, err = io.Copy(d, s)
+	if err != nil {
+		return err
+	}
+
+	si, err := os.Stat(source)
+	if err != nil {
+		return err
+	}
+	err = os.Chmod(dest, si.Mode())
+
+	return err
+}
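
A minimal sketch of the new path helpers in use; it assumes the package is named path (as its doc comment states) and imported under the gpath alias used elsewhere in this diff.

package main

import (
	"fmt"

	gpath "github.com/Masterminds/glide/path"
)

// Sketch: locate the nearest glide.yaml and the vendor directory relative to
// the current working directory, and report the primary GOPATH entry.
func examplePath() {
	if yml, err := gpath.Glide(); err == nil {
		fmt.Println("glide.yaml at:", yml)
	}
	if vendor, err := gpath.Vendor(); err == nil {
		fmt.Println("vendor dir at:", vendor)
	}
	fmt.Println("primary GOPATH:", gpath.Gopath())
}
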
diff --git a/path/path_test.go b/path/path_test.go
new file mode 100644
index 0000000..f2e4447
--- /dev/null
+++ b/path/path_test.go
@@ -0,0 +1,64 @@
+package path
+
+import (
+	"os"
+	"path/filepath"
+	"testing"
+)
+
+const testdata = "../testdata/path"
+
+func TestGlideWD(t *testing.T) {
+	wd := filepath.Join(testdata, "a/b/c")
+	found, err := GlideWD(wd)
+	if err != nil {
+		t.Errorf("Failed to get Glide directory: %s", err)
+	}
+
+	if found != filepath.Join(testdata, "a") {
+		t.Errorf("Expected %s to match %s", found, filepath.Join(testdata, "a"))
+	}
+
+	// This should fail
+	wd = "/No/Such/Dir"
+	found, err = GlideWD(wd)
+	if err == nil {
+		t.Errorf("Expected to get an error on a non-existent directory, not %s", found)
+	}
+
+}
+
+func TestVendor(t *testing.T) {
+	td, err := filepath.Abs(testdata)
+	if err != nil {
+		t.Fatal(err)
+	}
+	wd, _ := os.Getwd()
+	os.Chdir(filepath.Join(td, "a/b/c"))
+	res, err := Vendor()
+	if err != nil {
+		t.Errorf("Failed to resolve vendor directory: %s", err)
+	}
+	expect := filepath.Join(td, "a", "vendor")
+	if res != expect {
+		t.Errorf("Failed to find vendor: expected %s got %s", expect, res)
+	}
+	os.Chdir(wd)
+}
+func TestGlide(t *testing.T) {
+	wd, _ := os.Getwd()
+	td, err := filepath.Abs(testdata)
+	if err != nil {
+		t.Fatal(err)
+	}
+	os.Chdir(filepath.Join(td, "a/b/c"))
+	res, err := Glide()
+	if err != nil {
+		t.Errorf("Failed to resolve vendor directory: %s", err)
+	}
+	expect := filepath.Join(td, "a", "glide.yaml")
+	if res != expect {
+		t.Errorf("Failed to find vendor: expected %s got %s", expect, res)
+	}
+	os.Chdir(wd)
+}
diff --git a/cmd/cache.go b/repo/cache.go
similarity index 95%
rename from cmd/cache.go
rename to repo/cache.go
index bbb9c7b..43ec644 100644
--- a/cmd/cache.go
+++ b/repo/cache.go
@@ -1,4 +1,4 @@
-package cmd
+package repo
 
 import (
 	"encoding/json"
@@ -10,7 +10,7 @@
 	"strings"
 	"time"
 
-	"github.com/Masterminds/cookoo"
+	//"github.com/Masterminds/glide/msg"
 )
 
 var cacheEnabled = true
@@ -19,11 +19,12 @@
 
 // EnsureCacheDir Creates the $HOME/.glide/cache directory (unless home is
 // specified to be different) if it does not exist.
+/*
 func EnsureCacheDir(c cookoo.Context, p *cookoo.Params) (interface{}, cookoo.Interrupt) {
 	home := p.Get("home", "").(string)
 	if home == "" {
 		cacheEnabled = false
-		Warn("Unable to locate home directory")
+		msg.Warn("Unable to locate home directory")
 		return false, nil
 	}
 	err := os.MkdirAll(filepath.Join(home, "cache", "info"), os.ModeDir|os.ModePerm)
@@ -33,6 +34,7 @@
 	}
 	return false, nil
 }
+*/
 
 // Pass in a repo location and get a cache key from it.
 func cacheCreateKey(repo string) (string, error) {
diff --git a/cmd/cache_test.go b/repo/cache_test.go
similarity index 96%
rename from cmd/cache_test.go
rename to repo/cache_test.go
index 6104434..d5e1c1d 100644
--- a/cmd/cache_test.go
+++ b/repo/cache_test.go
@@ -1,4 +1,4 @@
-package cmd
+package repo
 
 import "testing"
 
diff --git a/repo/installer.go b/repo/installer.go
new file mode 100644
index 0000000..c3db217
--- /dev/null
+++ b/repo/installer.go
@@ -0,0 +1,633 @@
+package repo
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+	"sync"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/dependency"
+	"github.com/Masterminds/glide/importer"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+	"github.com/Masterminds/glide/util"
+	"github.com/Masterminds/semver"
+	"github.com/codegangsta/cli"
+)
+
+// Installer provides facilities for installing the repos in a config file.
+type Installer struct {
+
+	// Force the install when certain normally stopping conditions occur.
+	Force bool
+
+	// Home is the location of cache
+	Home string
+
+	// Vendor contains the path to put the vendor packages
+	Vendor string
+
+	// Use a cache
+	UseCache bool
+	// Use Gopath to cache
+	UseCacheGopath bool
+	// Use Gopath as a source to read from
+	UseGopath bool
+
+	// UpdateVendored instructs the environment to update in a way that is friendly
+	// to packages that have been "vendored in" (e.g. are copies of source, not repos)
+	UpdateVendored bool
+
+	// DeleteUnused deletes packages that are unused, but found in the vendor dir.
+	DeleteUnused bool
+
+	// RootPackage is the top level package importing other packages. If an
+	// imported package references this package it does not need to be
+	// downloaded and searched out again.
+	RootPackage string
+}
+
+// VendorPath returns the path to the location to put vendor packages
+func (i *Installer) VendorPath() string {
+	if i.Vendor != "" {
+		return i.Vendor
+	}
+
+	vp, err := gpath.Vendor()
+	if err != nil {
+		return filepath.FromSlash("./vendor")
+	}
+
+	return vp
+}
+
+// Install installs the dependencies from a Lockfile.
+func (i *Installer) Install(lock *cfg.Lockfile, conf *cfg.Config) (*cfg.Config, error) {
+
+	cwd, err := gpath.Vendor()
+	if err != nil {
+		return conf, err
+	}
+
+	// Create a config setup based on the Lockfile data to process with
+	// existing commands.
+	newConf := &cfg.Config{}
+	newConf.Name = conf.Name
+
+	newConf.Imports = make(cfg.Dependencies, len(lock.Imports))
+	for k, v := range lock.Imports {
+		newConf.Imports[k] = &cfg.Dependency{
+			Name:        v.Name,
+			Reference:   v.Version,
+			Repository:  v.Repository,
+			VcsType:     v.VcsType,
+			Subpackages: v.Subpackages,
+			Arch:        v.Arch,
+			Os:          v.Os,
+		}
+	}
+
+	newConf.DevImports = make(cfg.Dependencies, len(lock.DevImports))
+	for k, v := range lock.DevImports {
+		newConf.DevImports[k] = &cfg.Dependency{
+			Name:        v.Name,
+			Reference:   v.Version,
+			Repository:  v.Repository,
+			VcsType:     v.VcsType,
+			Subpackages: v.Subpackages,
+			Arch:        v.Arch,
+			Os:          v.Os,
+		}
+	}
+
+	newConf.DeDupe()
+
+	if len(newConf.Imports) == 0 {
+		msg.Info("No dependencies found. Nothing installed.\n")
+		return newConf, nil
+	}
+
+	ConcurrentUpdate(newConf.Imports, cwd, i)
+	ConcurrentUpdate(newConf.DevImports, cwd, i)
+	return newConf, nil
+}
+
+// Checkout reads the config file and checks out all dependencies mentioned there.
+//
+// This is used when initializing an empty vendor directory, or when updating a
+// vendor directory based on changed config.
+func (i *Installer) Checkout(conf *cfg.Config, useDev bool) error {
+
+	dest := i.VendorPath()
+
+	if err := ConcurrentUpdate(conf.Imports, dest, i); err != nil {
+		return err
+	}
+
+	if useDev {
+		return ConcurrentUpdate(conf.DevImports, dest, i)
+	}
+
+	return nil
+}
+
+// Update updates all dependencies.
+//
+// It begins with the dependencies in the config file, but also resolves
+// transitive dependencies. The returned lockfile has all of the dependencies
+// listed, but the version reconciliation has not been done.
+//
+// In other words, all versions in the Lockfile will be empty.
+func (i *Installer) Update(conf *cfg.Config) error {
+	base := "."
+	vpath := i.VendorPath()
+
+	ic := newImportCache()
+
+	m := &MissingPackageHandler{
+		destination: vpath,
+
+		cache:       i.UseCache,
+		cacheGopath: i.UseCacheGopath,
+		useGopath:   i.UseGopath,
+		home:        i.Home,
+		Config:      conf,
+		Use:         ic,
+	}
+
+	v := &VersionHandler{
+		Destination: vpath,
+		Use:         ic,
+		Imported:    make(map[string]bool),
+		Conflicts:   make(map[string]bool),
+		Config:      conf,
+	}
+
+	// Update imports
+	res, err := dependency.NewResolver(base)
+	if err != nil {
+		msg.Die("Failed to create a resolver: %s", err)
+	}
+	res.Config = conf
+	res.Handler = m
+	res.VersionHandler = v
+	msg.Info("Resolving imports")
+	_, err = allPackages(conf.Imports, res)
+	if err != nil {
+		msg.Die("Failed to retrieve a list of dependencies: %s", err)
+	}
+
+	msg.Warn("devImports not resolved.")
+
+	err = ConcurrentUpdate(conf.Imports, vpath, i)
+
+	return err
+}
+
+func (i *Installer) List(conf *cfg.Config) []*cfg.Dependency {
+	base := "."
+	vpath := i.VendorPath()
+
+	ic := newImportCache()
+
+	v := &VersionHandler{
+		Destination: vpath,
+		Use:         ic,
+		Imported:    make(map[string]bool),
+		Conflicts:   make(map[string]bool),
+		Config:      conf,
+	}
+
+	// Update imports
+	res, err := dependency.NewResolver(base)
+	if err != nil {
+		msg.Die("Failed to create a resolver: %s", err)
+	}
+	res.Config = conf
+	res.VersionHandler = v
+
+	msg.Info("Resolving imports")
+	_, err = allPackages(conf.Imports, res)
+	if err != nil {
+		msg.Die("Failed to retrieve a list of dependencies: %s", err)
+	}
+
+	msg.Warn("devImports not resolved.")
+
+	return conf.Imports
+}
+
+// ConcurrentUpdate takes a list of dependencies and updates in parallel.
+func ConcurrentUpdate(deps []*cfg.Dependency, cwd string, i *Installer) error {
+	done := make(chan struct{}, concurrentWorkers)
+	in := make(chan *cfg.Dependency, concurrentWorkers)
+	var wg sync.WaitGroup
+	var lock sync.Mutex
+	var returnErr error
+
+	for ii := 0; ii < concurrentWorkers; ii++ {
+		go func(ch <-chan *cfg.Dependency) {
+			for {
+				select {
+				case dep := <-ch:
+					if err := VcsUpdate(dep, cwd, i); err != nil {
+						msg.Warn("Update failed for %s: %s\n", dep.Name, err)
+						// Capture the error while making sure the concurrent
+						// operations don't step on each other.
+						lock.Lock()
+						if returnErr == nil {
+							returnErr = err
+						} else {
+							returnErr = cli.NewMultiError(returnErr, err)
+						}
+						lock.Unlock()
+					}
+					wg.Done()
+				case <-done:
+					return
+				}
+			}
+		}(in)
+	}
+
+	for _, dep := range deps {
+		wg.Add(1)
+		in <- dep
+	}
+
+	wg.Wait()
+
+	// Close goroutines setting the version
+	for ii := 0; ii < concurrentWorkers; ii++ {
+		done <- struct{}{}
+	}
+
+	return returnErr
+}
+
+// allPackages gets a list of all packages required to satisfy the given deps.
+func allPackages(deps []*cfg.Dependency, res *dependency.Resolver) ([]string, error) {
+	if len(deps) == 0 {
+		return []string{}, nil
+	}
+
+	vdir, err := gpath.Vendor()
+	if err != nil {
+		return []string{}, err
+	}
+	vdir += string(os.PathSeparator)
+	ll, err := res.ResolveAll(deps)
+	if err != nil {
+		return []string{}, err
+	}
+
+	for i := 0; i < len(ll); i++ {
+		ll[i] = strings.TrimPrefix(ll[i], vdir)
+	}
+	return ll, nil
+}
+
+// MissingPackageHandler is a dependency.MissingPackageHandler.
+//
+// When a package is not found, this attempts to resolve and fetch.
+//
+// When a package is found on the GOPATH, this notifies the user.
+type MissingPackageHandler struct {
+	destination                   string
+	home                          string
+	cache, cacheGopath, useGopath bool
+	RootPackage                   string
+	Config                        *cfg.Config
+	Use                           *importCache
+}
+
+func (m *MissingPackageHandler) NotFound(pkg string) (bool, error) {
+	root := util.GetRootFromPackage(pkg)
+
+	// Skip any references to the root package.
+	if root == m.RootPackage {
+		return false, nil
+	}
+
+	dest := filepath.Join(m.destination, root)
+
+	// This package may have been placed on the lookup list before it was
+	// downloaded, but it may have been downloaded since, before reaching this entry.
+	if _, err := os.Stat(dest); err == nil {
+		return true, nil
+	}
+
+	msg.Info("Fetching %s into %s", pkg, m.destination)
+
+	d := m.Config.Imports.Get(root)
+	// If the dependency is nil it means the Config doesn't yet know about it.
+	if d == nil {
+		d = m.Use.Get(root)
+		// We don't know about this dependency so we create a basic instance.
+		if d == nil {
+			d = &cfg.Dependency{Name: root}
+		}
+
+		m.Config.Imports = append(m.Config.Imports, d)
+	}
+	if err := VcsGet(d, dest, m.home, m.cache, m.cacheGopath, m.useGopath); err != nil {
+		return false, err
+	}
+	return true, nil
+}
+
+func (m *MissingPackageHandler) OnGopath(pkg string) (bool, error) {
+	// If useGopath is false, we fall back to the strategy of fetching from
+	// remote.
+	if !m.useGopath {
+		return m.NotFound(pkg)
+	}
+
+	root := util.GetRootFromPackage(pkg)
+
+	// Skip any references to the root package.
+	if root == m.RootPackage {
+		return false, nil
+	}
+
+	msg.Info("Copying package %s from the GOPATH.", pkg)
+	dest := filepath.Join(m.destination, pkg)
+	// Find package on Gopath
+	for _, gp := range gpath.Gopaths() {
+		src := filepath.Join(gp, pkg)
+		// FIXME: Should probably check if src is a dir or symlink.
+		if _, err := os.Stat(src); err == nil {
+			if err := os.MkdirAll(dest, os.ModeDir|0755); err != nil {
+				return false, err
+			}
+			if err := gpath.CopyDir(src, dest); err != nil {
+				return false, err
+			}
+			return true, nil
+		}
+	}
+
+	msg.Error("Could not locate %s on the GOPATH, though it was found before.", pkg)
+	return false, nil
+}
+
+// VersionHandler handles setting the proper version in the VCS.
+type VersionHandler struct {
+
+	// Use holds a version to try first, if we have one. This is a cache and will
+	// change over the course of setting versions.
+	Use *importCache
+
+	// Imported caches whether an import scan has already occurred here.
+	Imported map[string]bool
+
+	// Where the packages exist to set the version on.
+	Destination string
+
+	RootPackage string
+	Config      *cfg.Config
+
+	// There's a problem where many sub-packages have been asked to set a version
+	// and you can end up with numerous conflict messages that are exactly the
+	// same. We keep track so that each one is only displayed once.
+	// the parent pac
+	Conflicts map[string]bool
+}
+
+// Process imports dependencies for a package
+func (d *VersionHandler) Process(pkg string) (e error) {
+	root := util.GetRootFromPackage(pkg)
+
+	// Skip any references to the root package.
+	if root == d.RootPackage {
+		return nil
+	}
+
+	// We have not tried to import, yet.
+	// Should we look in places other than the root of the project?
+	if d.Imported[root] == false {
+		d.Imported[root] = true
+		p := filepath.Join(d.Destination, root)
+		f, deps, err := importer.Import(p)
+		if f && err == nil {
+			for _, dep := range deps {
+
+				// The first one wins. Would something smarter than this be better?
+				exists := d.Use.Get(dep.Name)
+				if exists == nil && (dep.Reference != "" || dep.Repository != "") {
+					d.Use.Add(dep.Name, dep)
+				}
+			}
+		} else if err != nil {
+			msg.Error("Unable to import from %s. Err: %s", root, err)
+			e = err
+		}
+	}
+
+	return
+}
+
+// SetVersion sets the version for a package. If that package version is already
+// set it handles the case by:
+// - keeping the already set version
+// - providing messaging about the version conflict
+// TODO(mattfarina): The way version setting happens can be improved. Currently not optimal.
+func (d *VersionHandler) SetVersion(pkg string) (e error) {
+	root := util.GetRootFromPackage(pkg)
+
+	// Skip any references to the root package.
+	if root == d.RootPackage {
+		return nil
+	}
+
+	v := d.Config.Imports.Get(root)
+
+	dep := d.Use.Get(root)
+	if dep != nil && v != nil {
+		if v.Reference == "" && dep.Reference != "" {
+			v.Reference = dep.Reference
+			// Clear the pin, if set, so the new version can be used.
+			v.Pin = ""
+			dep = v
+		} else if v.Reference != "" && dep.Reference != "" && v.Reference != dep.Reference {
+			dest := filepath.Join(d.Destination, filepath.FromSlash(v.Name))
+			dep = determineDependency(v, dep, dest)
+		}
+
+	} else if dep != nil {
+		// We've got an imported dependency to use and don't already have a
+		// record of it. Append it to the Imports.
+		d.Config.Imports = append(d.Config.Imports, dep)
+	} else if v != nil {
+		dep = v
+	} else {
+		// If we've gotten here we don't have any dependency objects.
+		r, sp := util.NormalizeName(pkg)
+		dep = &cfg.Dependency{
+			Name: r,
+		}
+		if sp != "" {
+			dep.Subpackages = []string{sp}
+		}
+		d.Config.Imports = append(d.Config.Imports, dep)
+	}
+
+	err := VcsVersion(dep, d.Destination)
+	if err != nil {
+		msg.Warn("Unable to set version on %s to %s. Err: %s", root, dep.Reference, err)
+		e = err
+	}
+
+	return
+}
+
+func determineDependency(v, dep *cfg.Dependency, dest string) *cfg.Dependency {
+	repo, err := v.GetRepo(dest)
+	if err != nil {
+		singleWarn("Unable to access repo for %s\n", v.Name)
+		singleInfo("Keeping %s %s", v.Name, v.Reference)
+		return v
+	}
+
+	vIsRef := repo.IsReference(v.Reference)
+	depIsRef := repo.IsReference(dep.Reference)
+
+	// Both are references and they are different ones.
+	if vIsRef && depIsRef {
+		singleWarn("Conflict: %s ref is %s, but also asked for %s\n", v.Name, v.Reference, dep.Reference)
+		singleInfo("Keeping %s %s", v.Name, v.Reference)
+		return v
+	} else if vIsRef {
+		// The current one is a reference and the suggestion is a SemVer constraint.
+		con, err := semver.NewConstraint(dep.Reference)
+		if err != nil {
+			singleWarn("Version issue for %s: '%s' is neither a reference nor a semantic version constraint\n", dep.Name, dep.Reference)
+			singleInfo("Keeping %s %s", v.Name, v.Reference)
+			return v
+		}
+
+		ver, err := semver.NewVersion(v.Reference)
+		if err != nil {
+			// The existing version is not a semantic version.
+			singleWarn("Conflict: %s version is %s, but also asked for %s\n", v.Name, v.Reference, dep.Reference)
+			singleInfo("Keeping %s %s", v.Name, v.Reference)
+			return v
+		}
+
+		if con.Check(ver) {
+			singleInfo("Keeping %s %s because it fits constraint '%s'", v.Name, v.Reference, dep.Reference)
+			return v
+		}
+		singleWarn("Conflict: %s version is %s but does not meet constraint '%s'\n", v.Name, v.Reference, dep.Reference)
+		singleInfo("Keeping %s %s", v.Name, v.Reference)
+		return v
+	} else if depIsRef {
+
+		con, err := semver.NewConstraint(v.Reference)
+		if err != nil {
+			singleWarn("Version issue for %s: '%s' is neither a reference nor a semantic version constraint\n", v.Name, v.Reference)
+			singleInfo("Keeping %s %s", v.Name, v.Reference)
+			return v
+		}
+
+		ver, err := semver.NewVersion(dep.Reference)
+		if err != nil {
+			singleWarn("Conflict: %s version is %s, but also asked for %s\n", v.Name, v.Reference, dep.Reference)
+			singleInfo("Keeping %s %s", v.Name, v.Reference)
+			return v
+		}
+
+		if con.Check(ver) {
+			v.Reference = dep.Reference
+			singleInfo("Using %s %s because it fits constraint '%s'", v.Name, v.Reference, v.Reference)
+			return v
+		}
+		singleWarn("Conflict: %s semantic version constraint is %s but '%s' does not meet the constraint\n", v.Name, v.Reference, v.Reference)
+		singleInfo("Keeping %s %s", v.Name, v.Reference)
+		return v
+	}
+	// Neither is a vcs reference and both could be semantic version
+	// constraints that are different.
+
+	_, err = semver.NewConstraint(dep.Reference)
+	if err != nil {
+		// dep.Reference is neither a reference nor a valid constraint.
+		singleWarn("Version %s %s is not a reference or valid semantic version constraint\n", dep.Name, dep.Reference)
+		singleInfo("Keeping %s %s", v.Name, v.Reference)
+		return v
+	}
+
+	_, err = semver.NewConstraint(v.Reference)
+	if err != nil {
+		// v.Reference is neither a reference nor a valid constraint.
+		// We really should never end up here.
+		singleWarn("Version %s %s is not a reference or valid semantic version constraint\n", v.Name, v.Reference)
+
+		v.Reference = dep.Reference
+		v.Pin = ""
+		singleInfo("Using %s %s because it is a valid version", v.Name, v.Reference)
+		return v
+	}
+
+	// Both versions are constraints. Try to merge them.
+	// If either comparison has an || skip merging. That's complicated.
+	ddor := strings.Index(dep.Reference, "||")
+	eor := strings.Index(v.Reference, "||")
+	if ddor == -1 && eor == -1 {
+		// Add the comparisons together.
+		newRef := v.Reference + ", " + dep.Reference
+		v.Reference = newRef
+		v.Pin = ""
+		singleInfo("Combining %s semantic version constraints %s and %s", v.Name, v.Reference, dep.Reference)
+		return v
+	}
+	singleWarn("Conflict: %s version is %s, but also asked for %s\n", v.Name, v.Reference, dep.Reference)
+	singleInfo("Keeping %s %s", v.Name, v.Reference)
+	return v
+}
+
+var warningMessage = make(map[string]bool)
+var infoMessage = make(map[string]bool)
+
+func singleWarn(ft string, v ...interface{}) {
+	m := fmt.Sprintf(ft, v...)
+	_, f := warningMessage[m]
+	if !f {
+		msg.Warn(m)
+		warningMessage[m] = true
+	}
+}
+
+func singleInfo(ft string, v ...interface{}) {
+	m := fmt.Sprintf(ft, v...)
+	_, f := infoMessage[m]
+	if !f {
+		msg.Info(m)
+		infoMessage[m] = true
+	}
+}
+
+type importCache struct {
+	cache map[string]*cfg.Dependency
+}
+
+func newImportCache() *importCache {
+	return &importCache{
+		cache: make(map[string]*cfg.Dependency),
+	}
+}
+
+func (i *importCache) Get(name string) *cfg.Dependency {
+	d, f := i.cache[name]
+	if f {
+		return d
+	}
+
+	return nil
+}
+
+func (i *importCache) Add(name string, dep *cfg.Dependency) {
+	i.cache[name] = dep
+}
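
A hedged sketch of driving the new Installer; the field values are illustrative assumptions that would normally come from CLI flags.

package main

import (
	"github.com/Masterminds/glide/cfg"
	"github.com/Masterminds/glide/repo"
)

// Sketch: check out everything listed in a config into the vendor directory.
// Force, UseGopath, and RootPackage are set to illustrative values here.
func exampleInstall(conf *cfg.Config) error {
	inst := &repo.Installer{
		Force:       false,
		UseGopath:   true,
		RootPackage: conf.Name,
	}
	return inst.Checkout(conf, false)
}
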
diff --git a/repo/repo.go b/repo/repo.go
new file mode 100644
index 0000000..3270f5c
--- /dev/null
+++ b/repo/repo.go
@@ -0,0 +1,17 @@
+/* Package repo provides tools for working with VCS repositories.
+
+Glide manages repositories in the vendor directory by using the native VCS
+systems of each repository upon which the code relies.
+*/
+package repo
+
+// concurrentWorkers is the number of workers to be used in concurrent operations.
+var concurrentWorkers = 20
+
+// UpdatingVendored indicates whether this run of Glide is updating a vendored vendor/ path.
+//
+// It is related to the --update-vendor flag for update and install.
+//
+// TODO: This is legacy, and maybe we should handle it differently. It should
+// be set either 0 or 1 times, and only at startup.
+//var UpdatingVendored bool = false
diff --git a/cmd/semver.go b/repo/semver.go
similarity index 74%
rename from cmd/semver.go
rename to repo/semver.go
index 566f431..e200a2c 100644
--- a/cmd/semver.go
+++ b/repo/semver.go
@@ -1,4 +1,4 @@
-package cmd
+package repo
 
 import (
 	"github.com/Masterminds/semver"
@@ -33,16 +33,3 @@
 
 	return append(branches, tags...), nil
 }
-
-func isBranch(branch string, repo vcs.Repo) (bool, error) {
-	branches, err := repo.Branches()
-	if err != nil {
-		return false, err
-	}
-	for _, b := range branches {
-		if b == branch {
-			return true, nil
-		}
-	}
-	return false, nil
-}
diff --git a/repo/set_reference.go b/repo/set_reference.go
new file mode 100644
index 0000000..d40615b
--- /dev/null
+++ b/repo/set_reference.go
@@ -0,0 +1,59 @@
+package repo
+
+import (
+	"sync"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+)
+
+// SetReference is a command to set the VCS reference (commit id, tag, etc) for
+// a project.
+func SetReference(conf *cfg.Config) error {
+
+	cwd, err := gpath.Vendor()
+	if err != nil {
+		return err
+	}
+
+	if len(conf.Imports) == 0 {
+		msg.Info("No references set.\n")
+		return nil
+	}
+
+	done := make(chan struct{}, concurrentWorkers)
+	in := make(chan *cfg.Dependency, concurrentWorkers)
+	var wg sync.WaitGroup
+
+	for i := 0; i < concurrentWorkers; i++ {
+		go func(ch <-chan *cfg.Dependency) {
+			for {
+				select {
+				case dep := <-ch:
+					if err := VcsVersion(dep, cwd); err != nil {
+						msg.Warn("Failed to set version on %s to %s: %s\n", dep.Name, dep.Reference, err)
+					}
+					wg.Done()
+				case <-done:
+					return
+				}
+			}
+		}(in)
+	}
+
+	for _, dep := range conf.Imports {
+		wg.Add(1)
+		in <- dep
+	}
+
+	wg.Wait()
+	// Close goroutines setting the version
+	for i := 0; i < concurrentWorkers; i++ {
+		done <- struct{}{}
+	}
+	// close(done)
+	// close(in)
+
+	return nil
+}
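
As a final hedged sketch, an update-style flow might combine the pieces above roughly as follows; the flow itself is an assumption for illustration, not lifted from this diff.

package main

import (
	"github.com/Masterminds/glide/cfg"
	"github.com/Masterminds/glide/repo"
)

// Sketch: resolve and fetch dependencies with the Installer, then pin each
// checkout to its configured reference. Illustrative only; error handling is
// abbreviated.
func exampleUpdateFlow(conf *cfg.Config) error {
	inst := &repo.Installer{UseGopath: true}
	if err := inst.Update(conf); err != nil {
		return err
	}
	return repo.SetReference(conf)
}
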
diff --git a/repo/vcs.go b/repo/vcs.go
new file mode 100644
index 0000000..d463b22
--- /dev/null
+++ b/repo/vcs.go
@@ -0,0 +1,662 @@
+package repo
+
+import (
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"net/http"
+	"net/url"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"sort"
+	"strings"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+	"github.com/Masterminds/semver"
+	v "github.com/Masterminds/vcs"
+)
+
+// VcsUpdate updates to a particular checkout based on the VCS setting.
+func VcsUpdate(dep *cfg.Dependency, vend string, inst *Installer) error {
+
+	// If the dependency has already been pinned we can skip it. This is a
+	// faster path so we don't need to resolve it again.
+	if dep.Pin != "" {
+		msg.Debug("Dependency %s has already been pinned. Fetching updates skipped.", dep.Name)
+		return nil
+	}
+
+	msg.Info("Fetching updates for %s.\n", dep.Name)
+
+	if filterArchOs(dep) {
+		msg.Info("%s is not used for %s/%s.\n", dep.Name, runtime.GOOS, runtime.GOARCH)
+		return nil
+	}
+
+	dest := filepath.Join(vend, dep.Name)
+	// If destination doesn't exist we need to perform an initial checkout.
+	if _, err := os.Stat(dest); os.IsNotExist(err) {
+		if err = VcsGet(dep, dest, inst.Home, inst.UseCache, inst.UseCacheGopath, inst.UseGopath); err != nil {
+			msg.Warn("Unable to checkout %s\n", dep.Name)
+			return err
+		}
+	} else {
+		// At this point we have a directory for the package.
+
+		// When the directory is not empty and has no VCS directory it's
+		// a vendored files situation.
+		empty, err := gpath.IsDirectoryEmpty(dest)
+		if err != nil {
+			return err
+		}
+		_, err = v.DetectVcsFromFS(dest)
+		if inst.UpdateVendored == false && empty == false && err == v.ErrCannotDetectVCS {
+			msg.Warn("%s appears to be a vendored package. Unable to update. Consider the '--update-vendored' flag.\n", dep.Name)
+		} else {
+
+			if inst.UpdateVendored == true && empty == false && err == v.ErrCannotDetectVCS {
+				// A vendored package, no repo, and updating the vendored packages
+				// has been opted into.
+				msg.Info("%s is a vendored package. Updating.", dep.Name)
+				err = os.RemoveAll(dest)
+				if err != nil {
+					msg.Error("Unable to update vendored dependency %s.\n", dep.Name)
+					return err
+				} else {
+					dep.UpdateAsVendored = true
+				}
+
+				if err = VcsGet(dep, dest, inst.Home, inst.UseCache, inst.UseCacheGopath, inst.UseGopath); err != nil {
+					msg.Warn("Unable to checkout %s\n", dep.Name)
+					return err
+				}
+
+				return nil
+			}
+
+			repo, err := dep.GetRepo(dest)
+
+			// Tried to checkout a repo to a path that does not work. Either the
+			// type or endpoint has changed. Force is being passed in so the old
+			// location can be removed and replaced with the new one.
+			// Warning, any changes in the old location will be deleted.
+			// TODO: Put dirty checking in on the existing local checkout.
+			if (err == v.ErrWrongVCS || err == v.ErrWrongRemote) && inst.Force == true {
+				var newRemote string
+				if len(dep.Repository) > 0 {
+					newRemote = dep.Repository
+				} else {
+					newRemote = "https://" + dep.Name
+				}
+
+				msg.Warn("Replacing %s with contents from %s\n", dep.Name, newRemote)
+				rerr := os.RemoveAll(dest)
+				if rerr != nil {
+					return rerr
+				}
+				if err = VcsGet(dep, dest, inst.Home, inst.UseCache, inst.UseCacheGopath, inst.UseGopath); err != nil {
+					msg.Warn("Unable to checkout %s\n", dep.Name)
+					return err
+				}
+			} else if err != nil {
+				return err
+			} else if repo.IsDirty() {
+				return fmt.Errorf("%s contains uncommitted changes. Skipping update", dep.Name)
+			} else {
+
+				// Check if the current version is a tag or commit id. If it is
+				// and that version is already checked out we can skip updating
+				// which is faster than going out to the Internet to perform
+				// an update.
+				if dep.Reference != "" {
+					version, err := repo.Version()
+					if err != nil {
+						return err
+					}
+					ib, err := isBranch(dep.Reference, repo)
+					if err != nil {
+						return err
+					}
+
+					// If the current version equals the ref and it's not a
+					// branch it's a tag or commit id so we can skip
+					// performing an update.
+					if version == dep.Reference && !ib {
+						msg.Info("%s is already set to version %s. Skipping update.", dep.Name, dep.Reference)
+						return nil
+					}
+				}
+
+				if err := repo.Update(); err != nil {
+					msg.Warn("Download failed.\n")
+					return err
+				}
+			}
+		}
+	}
+
+	return nil
+}
+
+// VcsVersion sets the VCS version for a checkout.
+func VcsVersion(dep *cfg.Dependency, vend string) error {
+
+	// If the dependency has already been pinned we can skip it. This is a
+	// faster path so we don't need to resolve it again.
+	if dep.Pin != "" {
+		msg.Debug("Dependency %s has already been pinned. Setting version skipped.", dep.Name)
+		return nil
+	}
+
+	cwd := filepath.Join(vend, dep.Name)
+
+	// If there is no reference configured there is nothing to set.
+	if dep.Reference == "" {
+		// Before exiting update the pinned version
+		repo, err := dep.GetRepo(cwd)
+		if err != nil {
+			return err
+		}
+		dep.Pin, err = repo.Version()
+		if err != nil {
+			return err
+		}
+		return nil
+	}
+
+	// When the directory is not empty and has no VCS directory it's
+	// a vendored files situation.
+	empty, err := gpath.IsDirectoryEmpty(cwd)
+	if err != nil {
+		return err
+	}
+	_, err = v.DetectVcsFromFS(cwd)
+	if empty == false && err == v.ErrCannotDetectVCS {
+		msg.Warn("%s appears to be a vendored package. Unable to set new version. Consider the '--update-vendored' flag.\n", dep.Name)
+	} else {
+		repo, err := dep.GetRepo(cwd)
+		if err != nil {
+			return err
+		}
+
+		ver := dep.Reference
+		// References in Git can begin with a ^ which is similar to semver.
+		// If there is a ^ prefix we assume it's a semver constraint rather than
+		// part of the git/VCS commit id.
+		if repo.IsReference(ver) && !strings.HasPrefix(ver, "^") {
+			msg.Info("Setting version for %s to %s.\n", dep.Name, ver)
+		} else {
+
+			// Create the constraint first to make sure it's valid before
+			// working on the repo.
+			constraint, err := semver.NewConstraint(ver)
+
+			// Make sure the constraint is valid. At this point it's not a valid
+			// reference so if it's not a valid constraint we can exit early.
+			if err != nil {
+				msg.Warn("The reference '%s' is not valid\n", ver)
+				return err
+			}
+
+			// Get the tags and branches (in that order)
+			refs, err := getAllVcsRefs(repo)
+			if err != nil {
+				return err
+			}
+
+			// Convert and filter the list to semver.Version instances
+			semvers := getSemVers(refs)
+
+			// Sort semver list
+			sort.Sort(sort.Reverse(semver.Collection(semvers)))
+			found := false
+			for _, v := range semvers {
+				if constraint.Check(v) {
+					found = true
+					// If the constraint passes get the original reference
+					ver = v.Original()
+					break
+				}
+			}
+			if found {
+				msg.Info("Detected semantic version. Setting version for %s to %s.\n", dep.Name, ver)
+			} else {
+				msg.Warn("Unable to find semantic version for constraint %s %s\n", dep.Name, ver)
+			}
+		}
+		if err := repo.UpdateVersion(ver); err != nil {
+			msg.Error("Failed to set version to %s: %s\n", dep.Reference, err)
+			return err
+		}
+		dep.Pin, err = repo.Version()
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// VcsGet figures out how to fetch a dependency, and then gets it.
+//
+// VcsGet installs into the dest.
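+//
+// The lookup order is: an existing clean copy in the GOPATH (useGopath), a
+// fresh checkout into the GOPATH (cacheGopath), the local cache under home
+// (cache), and finally a direct fetch into dest.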
+func VcsGet(dep *cfg.Dependency, dest, home string, cache, cacheGopath, useGopath bool) error {
+	// When not skipping the $GOPATH look in it for a copy of the package
+	if useGopath {
+		// Check if the $GOPATH has a viable version to use and if so copy to vendor
+		gps := gpath.Gopaths()
+		for _, p := range gps {
+			d := filepath.Join(p, "src", dep.Name)
+			if _, err := os.Stat(d); err == nil {
+				empty, err := gpath.IsDirectoryEmpty(d)
+				if empty || err != nil {
+					continue
+				}
+
+				repo, err := dep.GetRepo(d)
+				if err != nil {
+					continue
+				}
+
+				// Dirty repos have uncommitted changes.
+				if repo.IsDirty() {
+					continue
+				}
+
+				// Having found a repo we copy it to vendor and update it.
+				msg.Debug("Found %s in GOPATH at %s. Copying to %s", dep.Name, d, dest)
+				err = gpath.CopyDir(d, dest)
+				if err != nil {
+					return err
+				}
+
+				// Update the repo in the vendor directory
+				msg.Debug("Updating %s, now in the vendor path at %s", dep.Name, dest)
+				repo, err = dep.GetRepo(dest)
+				if err != nil {
+					return err
+				}
+				err = repo.Update()
+				if err != nil {
+					return err
+				}
+
+				// If there is no reference set on the dep we try to checkout
+				// the default branch.
+				if dep.Reference == "" {
+					db := defaultBranch(repo, home)
+					if db != "" {
+						err = repo.UpdateVersion(db)
+						if err != nil && msg.Default.IsDebugging {
+							msg.Debug("Attempting to set the version on %s to %s failed. Error %s", dep.Name, db, err)
+						}
+					}
+				}
+				return nil
+			}
+		}
+	}
+
+	// When opting in to caching in the GOPATH, attempt to put a copy there.
+	if cacheGopath {
+
+		// Since we didn't find an existing copy in the GOPATHs, try to clone there.
+		gp := gpath.Gopath()
+		if gp != "" {
+			d := filepath.Join(gp, "src", dep.Name)
+			if _, err := os.Stat(d); os.IsNotExist(err) {
+				// Empty directory so we check out the code here.
+				msg.Debug("Retrieving %s to %s before copying to vendor", dep.Name, d)
+				repo, err := dep.GetRepo(d)
+				if err != nil {
+					return err
+				}
+				if err = repo.Get(); err != nil {
+					return err
+				}
+
+				branch := findCurrentBranch(repo)
+				if branch != "" {
+					// we know the default branch so we can store it in the cache
+					var loc string
+					if dep.Repository != "" {
+						loc = dep.Repository
+					} else {
+						loc = "https://" + dep.Name
+					}
+					key, err := cacheCreateKey(loc)
+					if err == nil {
+						msg.Debug("Saving default branch for %s", repo.Remote())
+						c := cacheRepoInfo{DefaultBranch: branch}
+						err = saveCacheRepoData(key, c, home)
+						if msg.Default.IsDebugging && err == errCacheDisabled {
+							msg.Debug("Unable to cache default branch because caching is disabled")
+						}
+					}
+				}
+
+				msg.Debug("Copying %s from GOPATH at %s to %s", dep.Name, d, dest)
+				err = gpath.CopyDir(d, dest)
+				if err != nil {
+					return err
+				}
+
+				return nil
+			}
+		}
+	}
+
+	// If opting in to caching attempt to put it in the cache folder
+	if cache {
+		// Check if the cache has a viable version and try to use that.
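+		// The cache key is derived from the repo location. When no explicit
+		// repository is configured an https URL built from the package name is
+		// assumed.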
+		var loc string
+		if dep.Repository != "" {
+			loc = dep.Repository
+		} else {
+			loc = "https://" + dep.Name
+		}
+		key, err := cacheCreateKey(loc)
+		if err == nil {
+			d := filepath.Join(home, "cache", "src", key)
+
+			repo, err := dep.GetRepo(d)
+			if err != nil {
+				return err
+			}
+			// If the directory does not exist this is the first time the repo is cached.
+			if _, err = os.Stat(d); os.IsNotExist(err) {
+				msg.Debug("Adding %s to the cache for the first time", dep.Name)
+				err = repo.Get()
+				if err != nil {
+					return err
+				}
+				branch := findCurrentBranch(repo)
+				if branch != "" {
+					// we know the default branch so we can store it in the cache
+					var loc string
+					if dep.Repository != "" {
+						loc = dep.Repository
+					} else {
+						loc = "https://" + dep.Name
+					}
+					key, err := cacheCreateKey(loc)
+					if err == nil {
+						msg.Debug("Saving default branch for %s", repo.Remote())
+						c := cacheRepoInfo{DefaultBranch: branch}
+						err = saveCacheRepoData(key, c, home)
+						if err == errCacheDisabled {
+							msg.Debug("Unable to cache default branch because caching is disabled")
+						} else if err != nil {
+							msg.Debug("Error saving %s to cache. Error: %s", repo.Remote(), err)
+						}
+					}
+				}
+
+			} else {
+				msg.Debug("Updating %s in the cache", dep.Name)
+				err = repo.Update()
+				if err != nil {
+					return err
+				}
+			}
+
+			msg.Debug("Copying %s from the cache to %s", dep.Name, dest)
+			err = gpath.CopyDir(d, dest)
+			if err != nil {
+				return err
+			}
+
+			return nil
+		} else {
+			msg.Warn("Cache key generation error: %s", err)
+		}
+	}
+
+	// If unable to cache, pull directly into the vendor/ directory.
+	repo, err := dep.GetRepo(dest)
+	if err != nil {
+		return err
+	}
+
+	gerr := repo.Get()
+
+	// Attempt to cache the default branch
+	branch := findCurrentBranch(repo)
+	if branch != "" {
+		// we know the default branch so we can store it in the cache
+		var loc string
+		if dep.Repository != "" {
+			loc = dep.Repository
+		} else {
+			loc = "https://" + dep.Name
+		}
+		key, err := cacheCreateKey(loc)
+		if err == nil {
+			msg.Debug("Saving default branch for %s", repo.Remote())
+			c := cacheRepoInfo{DefaultBranch: branch}
+			err = saveCacheRepoData(key, c, home)
+			if err == errCacheDisabled {
+				msg.Debug("Unable to cache default branch because caching is disabled")
+			} else if err != nil {
+				msg.Debug("Error saving %s to cache - Error: %s", repo.Remote(), err)
+			}
+		}
+	}
+
+	return gerr
+}
+
+// filterArchOs indicates a dependency should be filtered out because it is
+// the wrong GOOS or GOARCH.
+//
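+// A dependency is only filtered out when it lists architectures or operating
+// systems and the current runtime.GOARCH/runtime.GOOS is not among them.
+//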
+// FIXME: Should this be moved to the dependency package?
+func filterArchOs(dep *cfg.Dependency) bool {
+	found := false
+	if len(dep.Arch) > 0 {
+		for _, a := range dep.Arch {
+			if a == runtime.GOARCH {
+				found = true
+			}
+		}
+		// If it's not found, it should be filtered out.
+		if !found {
+			return true
+		}
+	}
+
+	found = false
+	if len(dep.Os) > 0 {
+		for _, o := range dep.Os {
+			if o == runtime.GOOS {
+				found = true
+			}
+		}
+		if !found {
+			return true
+		}
+
+	}
+
+	return false
+}
+
+// isBranch returns true if the given string is a branch in VCS.
+func isBranch(branch string, repo v.Repo) (bool, error) {
+	branches, err := repo.Branches()
+	if err != nil {
+		return false, err
+	}
+	for _, b := range branches {
+		if b == branch {
+			return true, nil
+		}
+	}
+	return false, nil
+}
+
+// defaultBranch tries to ascertain the default branch for the given repo.
+// Some repos will have multiple branches in them (e.g. Git) while others
+// (e.g. Svn) will not.
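+//
+// The default branch is looked up in the local cache first, then via the
+// GitHub or Bitbucket API. An empty string is returned when it cannot be
+// determined.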
+func defaultBranch(repo v.Repo, home string) string {
+
+	// Svn and Bzr use different locations (paths or entire locations)
+	// for branches so we won't have a default branch.
+	if repo.Vcs() == v.Svn || repo.Vcs() == v.Bzr {
+		return ""
+	}
+
+	// Check the cache for a value.
+	key, kerr := cacheCreateKey(repo.Remote())
+	var d cacheRepoInfo
+	if kerr == nil {
+		d, err := cacheRepoData(key, home)
+		if err == nil {
+			if d.DefaultBranch != "" {
+				return d.DefaultBranch
+			}
+		}
+	}
+
+	// If we don't have it in the store try some APIs
+	r := repo.Remote()
+	u, err := url.Parse(r)
+	if err != nil {
+		return ""
+	}
+	if u.Scheme == "" {
+		// Where there is no scheme we try URLs like git@github.com:foo/bar
+		r = strings.Replace(r, ":", "/", -1)
+		r = "ssh://" + r
+		u, err = url.Parse(r)
+		if err != nil {
+			return ""
+		}
+		u.Scheme = ""
+	}
+	if u.Host == "github.com" {
+		parts := strings.Split(u.Path, "/")
+		if len(parts) != 2 {
+			return ""
+		}
+		api := fmt.Sprintf("https://api.github.com/repos/%s/%s", parts[0], parts[1])
+		resp, err := http.Get(api)
+		if err != nil {
+			return ""
+		}
+		defer resp.Body.Close()
+		if resp.StatusCode >= 300 || resp.StatusCode < 200 {
+			return ""
+		}
+		body, err := ioutil.ReadAll(resp.Body)
+		if err != nil {
+			return ""
+		}
+		var data interface{}
+		err = json.Unmarshal(body, &data)
+		if err != nil {
+			return ""
+		}
+		gh := data.(map[string]interface{})
+		db := gh["default_branch"].(string)
+		if kerr == nil {
+			d.DefaultBranch = db
+			err := saveCacheRepoData(key, d, home)
+			if err == errCacheDisabled {
+				msg.Debug("Unable to cache default branch because caching is disabled")
+			} else if err != nil {
+				msg.Debug("Error saving %s to cache. Error: %s", repo.Remote(), err)
+			}
+		}
+		return db
+	}
+
+	if u.Host == "bitbucket.org" {
+		parts := strings.Split(u.Path, "/")
+		if len(parts) != 2 {
+			return ""
+		}
+		api := fmt.Sprintf("https://bitbucket.org/api/1.0/repositories/%s/%s/main-branch/", parts[0], parts[1])
+		resp, err := http.Get(api)
+		if err != nil {
+			return ""
+		}
+		defer resp.Body.Close()
+		if resp.StatusCode >= 300 || resp.StatusCode < 200 {
+			return ""
+		}
+		body, err := ioutil.ReadAll(resp.Body)
+		if err != nil {
+			return ""
+		}
+		var data interface{}
+		err = json.Unmarshal(body, &data)
+		if err != nil {
+			return ""
+		}
+		bb := data.(map[string]interface{})
+		db := bb["name"].(string)
+		if kerr == nil {
+			d.DefaultBranch = db
+			err := saveCacheRepoData(key, d, home)
+			if err == errCacheDisabled {
+				msg.Debug("Unable to cache default branch because caching is disabled")
+			} else if err != nil {
+				msg.Debug("Error saving %s to cache. Error: %s", repo.Remote(), err)
+			}
+		}
+		return db
+	}
+
+	return ""
+}
+
+// findCurrentBranch finds the current branch name of a local repo, if there is one.
+func findCurrentBranch(repo v.Repo) string {
+	msg.Debug("Attempting to find current branch for %s", repo.Remote())
+	// Svn and Bzr don't have default branches.
+	if repo.Vcs() == v.Svn || repo.Vcs() == v.Bzr {
+		return ""
+	}
+
+	if repo.Vcs() == v.Git {
+		c := exec.Command("git", "symbolic-ref", "--short", "HEAD")
+		c.Dir = repo.LocalPath()
+		c.Env = envForDir(c.Dir)
+		out, err := c.CombinedOutput()
+		if err != nil {
+			msg.Debug("Unable to find current branch for %s, error: %s", repo.Remote(), err)
+			return ""
+		}
+		return strings.TrimSpace(string(out))
+	}
+
+	if repo.Vcs() == v.Hg {
+		c := exec.Command("hg", "branch")
+		c.Dir = repo.LocalPath()
+		c.Env = envForDir(c.Dir)
+		out, err := c.CombinedOutput()
+		if err != nil {
+			msg.Debug("Unable to find current branch for %s, error: %s", repo.Remote(), err)
+			return ""
+		}
+		return strings.TrimSpace(string(out))
+	}
+
+	return ""
+}
+
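+// envForDir returns a copy of the current environment with PWD set to dir.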
+func envForDir(dir string) []string {
+	env := os.Environ()
+	return mergeEnvLists([]string{"PWD=" + dir}, env)
+}
+
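+// mergeEnvLists merges two environment lists; entries in `in` override
+// matching entries in `out`, and any remaining entries are appended.
+//
+// For example, merging []string{"PWD=/tmp"} into an environment that already
+// contains a PWD entry replaces that entry.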
+func mergeEnvLists(in, out []string) []string {
+NextVar:
+	for _, inkv := range in {
+		k := strings.SplitAfterN(inkv, "=", 2)[0]
+		for i, outkv := range out {
+			if strings.HasPrefix(outkv, k) {
+				out[i] = inkv
+				continue NextVar
+			}
+		}
+		out = append(out, inkv)
+	}
+	return out
+}
diff --git a/repo/vendored_cleanup.go b/repo/vendored_cleanup.go
new file mode 100644
index 0000000..de464d6
--- /dev/null
+++ b/repo/vendored_cleanup.go
@@ -0,0 +1,43 @@
+package repo
+
+import (
+	"os"
+	"path/filepath"
+
+	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+)
+
+// VendoredCleanUp cleans up vendored codebases after an update.
+//
+// This should _only_ be run for installations that do not want VCS repos inside
+// of the vendor/ directory.
+func VendoredCleanup(conf *cfg.Config) error {
+	vend, err := gpath.Vendor()
+	if err != nil {
+		return err
+	}
+
+	for _, dep := range conf.Imports {
+		if dep.UpdateAsVendored {
+			msg.Info("Cleaning up vendored package %s\n", dep.Name)
+
+			// Remove the VCS metadata directory (e.g. .git, .hg).
+			cwd := filepath.Join(vend, dep.Name)
+			repo, err := dep.GetRepo(cwd)
+			if err != nil {
+				msg.Error("Error cleaning up %s:%s", dep.Name, err)
+				continue
+			}
+			t := repo.Vcs()
+			err = os.RemoveAll(filepath.Join(cwd, "."+string(t)))
+			if err != nil {
+				msg.Error("Error cleaning up VCS dir for %s:%s", dep.Name, err)
+			}
+		}
+
+	}
+
+	return nil
+}
diff --git a/testdata/name/glide.yaml b/testdata/name/glide.yaml
new file mode 100644
index 0000000..8987dfb
--- /dev/null
+++ b/testdata/name/glide.yaml
@@ -0,0 +1,2 @@
+package: technosophos.com/x/foo
+import: []
diff --git a/testdata/name/glide2.yaml b/testdata/name/glide2.yaml
new file mode 100644
index 0000000..29d8780
--- /dev/null
+++ b/testdata/name/glide2.yaml
@@ -0,0 +1,2 @@
+package: another/name
+import: []
diff --git a/testdata/nv/a/foo.empty b/testdata/nv/a/foo.empty
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testdata/nv/a/foo.empty
diff --git a/testdata/nv/b/foo.empty b/testdata/nv/b/foo.empty
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testdata/nv/b/foo.empty
diff --git a/testdata/nv/c/foo.empty b/testdata/nv/c/foo.empty
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testdata/nv/c/foo.empty
diff --git a/testdata/path/a/b/c/placeholder.empty b/testdata/path/a/b/c/placeholder.empty
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testdata/path/a/b/c/placeholder.empty
diff --git a/testdata/path/a/glide.yaml b/testdata/path/a/glide.yaml
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testdata/path/a/glide.yaml
diff --git a/testdata/plugin/glide-hello b/testdata/plugin/glide-hello
new file mode 100755
index 0000000..e9d6e69
--- /dev/null
+++ b/testdata/plugin/glide-hello
@@ -0,0 +1,2 @@
+#!/bin/bash
+echo "Hello from the other glide"
diff --git a/testdata/rebuild/glide.yaml b/testdata/rebuild/glide.yaml
new file mode 100644
index 0000000..7d204f1
--- /dev/null
+++ b/testdata/rebuild/glide.yaml
@@ -0,0 +1,3 @@
+package: github.com/Masterminds/glide/testdata/plugin
+import:
+  - package: example.com/x/foo
diff --git a/tree/tree.go b/tree/tree.go
new file mode 100644
index 0000000..0ba0d56
--- /dev/null
+++ b/tree/tree.go
@@ -0,0 +1,172 @@
+package tree
+
+import (
+	"container/list"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"github.com/Masterminds/glide/dependency"
+	"github.com/Masterminds/glide/msg"
+	gpath "github.com/Masterminds/glide/path"
+	"github.com/Masterminds/glide/util"
+)
+
+// Display displays a tree view of the given project.
+//
+// FIXME: The output formatting could use some TLC.
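+//
+// A hypothetical invocation (the project name below is illustrative only)
+// might look like:
+//
+//	b, _ := util.GetBuildContext()
+//	Display(b, ".", "github.com/example/project", 1, false, list.New())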
+func Display(b *util.BuildCtxt, basedir, myName string, level int, core bool, l *list.List) {
+	deps := walkDeps(b, basedir, myName)
+	for _, name := range deps {
+		found := findPkg(b, name, basedir)
+		if found.Loc == dependency.LocUnknown {
+			m := "glide get " + found.Name
+			msg.Puts("\t%s\t(%s)", found.Name, m)
+			continue
+		}
+		if !core && found.Loc == dependency.LocGoroot || found.Loc == dependency.LocCgo {
+			continue
+		}
+		msg.Print(strings.Repeat("|\t", level-1) + "|-- ")
+
+		f := findInList(found.Name, l)
+		if f {
+			msg.Puts("(Recursion) %s   (%s)", found.Name, found.Path)
+		} else {
+			// Each branch of the tree works on its own copy of the list so
+			// sibling branches do not affect one another.
+			cl := copyList(l)
+			cl.PushBack(found.Name)
+			msg.Puts("%s   (%s)", found.Name, found.Path)
+			Display(b, found.Path, found.Name, level+1, core, cl)
+		}
+	}
+}
+
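+// walkDeps walks the source tree rooted at base and collects the import
+// paths it finds, skipping GOROOT packages and imports of myName itself.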
+func walkDeps(b *util.BuildCtxt, base, myName string) []string {
+	externalDeps := []string{}
+	filepath.Walk(base, func(path string, fi os.FileInfo, err error) error {
+		if !dependency.IsSrcDir(fi) {
+			if fi.IsDir() {
+				return filepath.SkipDir
+			}
+			return nil
+		}
+
+		pkg, err := b.ImportDir(path, 0)
+		if err != nil {
+			if !strings.HasPrefix(err.Error(), "no buildable Go source") {
+				msg.Warn("Error: %s (%s)", err, path)
+				// Not sure if we should return here.
+				//return err
+			}
+		}
+
+		if pkg.Goroot {
+			return nil
+		}
+
+		for _, imp := range pkg.Imports {
+			//if strings.HasPrefix(imp, myName) {
+			////Info("Skipping %s because it is a subpackage of %s", imp, myName)
+			//continue
+			//}
+			if imp == myName {
+				continue
+			}
+			externalDeps = append(externalDeps, imp)
+		}
+
+		return nil
+	})
+	return externalDeps
+}
+
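+// findPkg locates the named package, checking vendor/ directories above cwd,
+// then the GOPATH, then the GOROOT, and finally treating "C" as cgo.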
+func findPkg(b *util.BuildCtxt, name, cwd string) *dependency.PkgInfo {
+	var fi os.FileInfo
+	var err error
+	var p string
+
+	info := &dependency.PkgInfo{
+		Name: name,
+	}
+
+	// Recurse backward to scan other vendor/ directories.
+	// If the cwd isn't an absolute path, walking upwards looking for vendor/
+	// folders can get into an infinite loop.
+	abs, err := filepath.Abs(cwd)
+	if err != nil {
+		abs = cwd
+	}
+	if abs != "." {
+		// Previously the loop checked for wd == "/". The path "/" is a POSIX
+		// path, so that check fails on Windows. Instead, the loop now makes
+		// sure the same wd isn't seen twice. Seeing the same wd more than once
+		// means we have started looping on the same location, which is the
+		// top level.
+		pwd := ""
+		for wd := abs; wd != pwd; wd = filepath.Dir(wd) {
+			pwd = wd
+
+			// Don't look for packages outside the GOPATH
+			// Note, the GOPATH may or may not end with the path separator.
+			// The output of filepath.Dir does not include the path separator at
+			// the end, so we need to test both.
+			if wd == b.GOPATH || wd+string(os.PathSeparator) == b.GOPATH {
+				break
+			}
+			p = filepath.Join(wd, "vendor", name)
+			if fi, err = os.Stat(p); err == nil && (fi.IsDir() || gpath.IsLink(fi)) {
+				info.Path = p
+				info.Loc = dependency.LocVendor
+				info.Vendored = true
+				return info
+			}
+		}
+	}
+	// Check $GOPATH
+	for _, r := range strings.Split(b.GOPATH, ":") {
+		p = filepath.Join(r, "src", name)
+		if fi, err = os.Stat(p); err == nil && (fi.IsDir() || gpath.IsLink(fi)) {
+			info.Path = p
+			info.Loc = dependency.LocGopath
+			return info
+		}
+	}
+
+	// Check $GOROOT
+	for _, r := range strings.Split(b.GOROOT, ":") {
+		p = filepath.Join(r, "src", name)
+		if fi, err = os.Stat(p); err == nil && (fi.IsDir() || gpath.IsLink(fi)) {
+			info.Path = p
+			info.Loc = dependency.LocGoroot
+			return info
+		}
+	}
+
+	// Finally, if this is "C", we're dealing with cgo
+	if name == "C" {
+		info.Loc = dependency.LocCgo
+	}
+
+	return info
+}
+
+// copyList copies an existing list to a new list.
+func copyList(l *list.List) *list.List {
+	n := list.New()
+	for e := l.Front(); e != nil; e = e.Next() {
+		n.PushBack(e.Value.(string))
+	}
+	return n
+}
+
+// findInList searches a list haystack for a string needle.
+func findInList(n string, l *list.List) bool {
+	for e := l.Front(); e != nil; e = e.Next() {
+		if e.Value.(string) == n {
+			return true
+		}
+	}
+
+	return false
+}
diff --git a/tree/tree_test.go b/tree/tree_test.go
new file mode 100644
index 0000000..19f1b62
--- /dev/null
+++ b/tree/tree_test.go
@@ -0,0 +1,34 @@
+/* Package tree contains functions for printing a dependency tree.
+
+The future of the tree functionality is uncertain, as it is neither core to
+the functionality of Glide, nor particularly complementary. Its principal use
+case is for debugging the generated dependency tree.
+
+Currently, the tree package builds its dependency tree in a slightly different
+way than the `dependency` package does. This should not make any practical
+difference, though code-wise it would be nice to change this over to use the
+`dependency` resolver.
+*/
+package tree
+
+import (
+	"container/list"
+	"testing"
+)
+
+func TestFindInTree(t *testing.T) {
+	l := list.New()
+	l.PushBack("github.com/Masterminds/glide")
+	l.PushBack("github.com/Masterminds/vcs")
+	l.PushBack("github.com/Masterminds/semver")
+
+	f := findInList("foo", l)
+	if f != false {
+		t.Error("findInList found true instead of false")
+	}
+
+	f = findInList("github.com/Masterminds/vcs", l)
+	if f != true {
+		t.Error("findInList found false instead of true")
+	}
+}
diff --git a/util/normalizename_test.go b/util/normalizename_test.go
new file mode 100644
index 0000000..fe1f77b
--- /dev/null
+++ b/util/normalizename_test.go
@@ -0,0 +1,21 @@
+package util
+
+import (
+	"testing"
+)
+
+func TestNormalizeName(t *testing.T) {
+	packages := map[string]string{
+		"github.com/Masterminds/cookoo/web/io/foo": "github.com/Masterminds/cookoo",
+		"golang.org/x/crypto/ssh":                  "golang.org/x/crypto",
+		"incomplete/example":                       "incomplete/example",
+		"net":                                      "net",
+	}
+	for start, expected := range packages {
+		if finish, extra := NormalizeName(start); expected != finish {
+			t.Errorf("Expected '%s', got '%s'", expected, finish)
+		} else if start != finish && start != finish+"/"+extra {
+			t.Errorf("Expected %s to end with %s", finish, extra)
+		}
+	}
+}
diff --git a/util/util.go b/util/util.go
index cd26d31..bf949c1 100644
--- a/util/util.go
+++ b/util/util.go
@@ -9,6 +9,7 @@
 	"net/url"
 	"os"
 	"os/exec"
+	"path/filepath"
 	"regexp"
 	"strings"
 
@@ -228,9 +229,33 @@
 	build.Context
 }
 
+// PackageName attempts to determine the name of the base package.
+//
+// If resolution fails, this will return "main".
+func (b *BuildCtxt) PackageName(base string) string {
+	cwd, err := os.Getwd()
+	if err != nil {
+		return "main"
+	}
+
+	pkg, err := b.Import(base, cwd, 0)
+	if err != nil {
+		// There may not be any top level Go source files but the project may
+		// still be within the GOPATH.
+		if strings.HasPrefix(base, b.GOPATH) {
+			p := strings.TrimPrefix(base, b.GOPATH)
+			return strings.Trim(p, string(os.PathSeparator))
+		}
+	}
+
+	return pkg.ImportPath
+}
+
 // GetBuildContext returns a build context from go/build. When the $GOROOT
 // variable is not set in the users environment it sets the context's root
 // path to the path returned by 'go env GOROOT'.
+//
+// TODO: This should be moved to the `dependency` package.
 func GetBuildContext() (*BuildCtxt, error) {
 	buildContext := &BuildCtxt{build.Default}
 	if goRoot := os.Getenv("GOROOT"); len(goRoot) == 0 {
@@ -243,3 +268,33 @@
 	}
 	return buildContext, nil
 }
+
+// NormalizeName takes a package name and normalizes it to the top level package.
+//
+// For example, golang.org/x/crypto/ssh becomes golang.org/x/crypto. 'ssh' is
+// returned as extra data.
+//
+// FIXME: Is this deprecated?
+func NormalizeName(name string) (string, string) {
+	// Fast path: check if the name is in the GOROOT. There is an issue when a
+	// pkg is in the GOROOT and GetRootFromPackage tries to look it up, because
+	// it expects remote names.
+	b, err := GetBuildContext()
+	if err == nil {
+		p := filepath.Join(b.GOROOT, "src", name)
+		if _, err := os.Stat(p); err == nil {
+			return name, ""
+		}
+	}
+
+	root := GetRootFromPackage(name)
+	extra := strings.TrimPrefix(name, root)
+	if len(extra) > 0 && extra != "/" {
+		extra = strings.TrimPrefix(extra, "/")
+	} else {
+		// If extra is / (which is what it would be here) we want to return ""
+		extra = ""
+	}
+
+	return root, extra
+}