Working on faster flattening.
diff --git a/cmd/flatten.go b/cmd/flatten.go
index bb0f055..f49d3b5 100644
--- a/cmd/flatten.go
+++ b/cmd/flatten.go
@@ -4,11 +4,14 @@
 	"io/ioutil"
 	"os"
 	"path"
+	"path/filepath"
 	"strings"
 
 	"github.com/Masterminds/cookoo"
 	"github.com/Masterminds/glide/cfg"
-	"github.com/Masterminds/glide/util"
+	"github.com/Masterminds/glide/dependency"
+	"github.com/Masterminds/glide/msg"
+	//"github.com/Masterminds/glide/util"
 	"github.com/Masterminds/semver"
 )
 
@@ -56,10 +59,17 @@
 
 	f := &flattening{conf, vend, vend, deps, packages}
 
+	pkgs, err := findAllProjects(f, strings.TrimSuffix(vend, "/vendor"))
+	if err != nil {
+		return conf, err
+	}
+	conf.Imports = pkgs
+	return conf, nil
+
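+	// NOTE: the early return above leaves the original recursive flattening below unreachable for now.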
 	// The assumption here is that once something has been scanned once in a
 	// run, there is no need to scan it again.
 	scanned := map[string]bool{}
-	err := recFlatten(f, force, home, cache, cacheGopath, skipGopath, scanned)
+	err = recFlatten(f, force, home, cache, cacheGopath, skipGopath, scanned)
 	if err != nil {
 		return conf, err
 	}
@@ -143,6 +153,40 @@
 	return nil
 }
 
+// findAllProjects returns every project the top-level project depends on, combining
+// the declared imports with any additional packages the resolver discovers.
+func findAllProjects(f *flattening, top string) ([]*cfg.Dependency, error) {
+
+	seen := map[string]bool{}
+	for _, imp := range f.conf.Imports {
+		seen[imp.Name] = true
+	}
+
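+	// Build a resolver rooted at the project directory (the parent of vendor).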
+	res, err := dependency.NewResolver(top)
+	if err != nil {
+		return []*cfg.Dependency{}, err
+	}
+
+	prjs, err := res.ResolveAll(f.conf.Imports)
+	if err != nil {
+		return []*cfg.Dependency{}, err
+	}
+
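+	// Start from the declared imports and append any project the resolver finds that is not already listed.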
+	out := make([]*cfg.Dependency, len(f.conf.Imports))
+	copy(out, f.conf.Imports)
+	for _, p := range prjs {
+		p, err = filepath.Rel(f.top, p)
+		if err != nil {
+			Warn("Could not compute path relative to vendor: %s", err)
+		}
+
+		p, _ = NormalizeName(p)
+		if !seen[p] {
+			out = append(out, &cfg.Dependency{Name: p})
+		}
+	}
+	return out, nil
+}
+
 // flattenGlideUp does a glide update in the middle of a flatten operation.
 //
 // While this is expensive, it is also necessary to make sure we have the
@@ -264,65 +308,107 @@
 // This always returns true because it always handles the job of searching
 // for dependencies. So generally it should be the last merge strategy
 // that you try.
-func mergeGuess(dir, pkg string, deps map[string]*cfg.Dependency, vend string, scanned map[string]bool) ([]string, bool) {
-	Info("Scanning %s for dependencies.", pkg)
-	buildContext, err := GetBuildContext()
+func mergeGuess(fullpath, pkg string, deps map[string]*cfg.Dependency, vend string, scanned map[string]bool) ([]string, bool) {
+	if scanned[pkg] {
+		return []string{}, true
+	}
+	scanned[pkg] = true
+
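+	// fullpath is the package's location under vendor; stripping the package name gives
+	// the vendor directory, and stripping "vendor/" gives the project root.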
+	vendor := strings.TrimSuffix(fullpath, pkg)
+	Info("Scanning %s for dependencies in %s.", pkg, vendor)
+	resolver, err := dependency.NewResolver(strings.TrimSuffix(vendor, "vendor/"))
 	if err != nil {
 		Warn("Could not scan package %q: %s", pkg, err)
 		return []string{}, false
 	}
 
-	res := []string{}
-
-	if _, err := os.Stat(dir); err != nil {
-		Warn("Directory is missing: %s", dir)
-		return res, true
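+	// Any package the resolver cannot find is cloned directly into this vendor directory.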
+	resolver.Handler = &InstallMissingPackagesHandler{
+		Vendor:         vendor,
+		Home:           vendor,
+		UseCache:       false,
+		UseCacheGopath: false,
+		SkipGopath:     true,
 	}
 
-	d := walkDeps(buildContext, dir, pkg)
-	for _, oname := range d {
-		if _, ok := scanned[oname]; ok {
-			//Info("===> Scanned %s already. Skipping", name)
+	resolved, err := resolver.ResolveAll([]*cfg.Dependency{&cfg.Dependency{Name: pkg}})
+	if err != nil {
+		msg.Error("Failed to resolve %s: %s", pkg, err)
+	}
+
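+	// Convert the resolved absolute paths back into import paths relative to vendor.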
+	cp := make([]string, 0, len(resolved))
+	for _, d := range resolved {
+		d, err = filepath.Rel(vendor, d)
+		if err != nil {
+			msg.Warn("Failed to get relative path of %s", d)
+		} else if d == "." || d == "" {
 			continue
 		}
-		Debug("=> Scanning %s", oname)
-		name, _ := NormalizeName(oname)
-		//if _, ok := deps[name]; ok {
-		//scanned[oname] = true
-		//Debug("====> Seen %s already. Skipping", name)
-		//continue
-		//}
+		d, _ = NormalizeName(d)
 
-		found := findPkg(buildContext, name, dir)
-		switch found.PType {
-		case ptypeUnknown:
-			Info("==> Unknown %s (%s)", name, oname)
-			Debug("✨☆ Undownloaded dependency: %s", name)
-			repo := util.GetRootFromPackage(name)
-			nd := &cfg.Dependency{
-				Name:       name,
-				Repository: "https://" + repo,
-			}
-			deps[name] = nd
-			res = append(res, name)
-		case ptypeGoroot, ptypeCgo:
-			scanned[oname] = true
-			// Why do we break rather than continue?
-			break
-		default:
-			// We're looking for dependencies that might exist in $GOPATH
-			// but not be on vendor. We add any that are on $GOPATH.
-			if _, ok := deps[name]; !ok {
-				Debug("✨☆ GOPATH dependency: %s", name)
-				nd := &cfg.Dependency{Name: name}
-				deps[name] = nd
-				res = append(res, name)
-			}
-			scanned[oname] = true
+		// Just in case self comes up here, we skip it.
+		if d == pkg || d == "" {
+			continue
 		}
+		cp = append(cp, d)
+		//scanned[d] = true
 	}
 
-	return res, true
+	msg.Info("Returning %v (len: %d)", cp, len(cp))
+	return cp, true
+
+	/*
+		res := []string{}
+
+		if _, err := os.Stat(dir); err != nil {
+			Warn("Directory is missing: %s", dir)
+			return res, true
+		}
+
+		d := walkDeps(buildContext, dir, pkg)
+		for _, oname := range d {
+			if _, ok := scanned[oname]; ok {
+				//Info("===> Scanned %s already. Skipping", name)
+				continue
+			}
+			Debug("=> Scanning %s", oname)
+			name, _ := NormalizeName(oname)
+			//if _, ok := deps[name]; ok {
+			//scanned[oname] = true
+			//Debug("====> Seen %s already. Skipping", name)
+			//continue
+			//}
+
+			found := findPkg(buildContext, name, dir)
+			switch found.PType {
+			case ptypeUnknown:
+				Info("==> Unknown %s (%s)", name, oname)
+				Debug("✨☆ Undownloaded dependency: %s", name)
+				repo := util.GetRootFromPackage(name)
+				nd := &cfg.Dependency{
+					Name:       name,
+					Repository: "https://" + repo,
+				}
+				deps[name] = nd
+				res = append(res, name)
+			case ptypeGoroot, ptypeCgo:
+				scanned[oname] = true
+				// Why do we break rather than continue?
+				break
+			default:
+				// We're looking for dependencies that might exist in $GOPATH
+				// but not be on vendor. We add any that are on $GOPATH.
+				if _, ok := deps[name]; !ok {
+					Debug("✨☆ GOPATH dependency: %s", name)
+					nd := &cfg.Dependency{Name: name}
+					deps[name] = nd
+					res = append(res, name)
+				}
+				scanned[oname] = true
+			}
+		}
+
+		return res, true
+	*/
 }
 
 // mergeDeps merges any dependency array into deps.
@@ -453,3 +539,23 @@
 	}
 	return mod
 }
+
+// InstallMissingPackagesHandler handles packages that the resolver cannot find by
+// cloning them into the vendor directory.
+type InstallMissingPackagesHandler struct {
+	Vendor, Home                         string
+	UseCache, UseCacheGopath, SkipGopath bool
+}
+
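+// NotFound is called when the resolver cannot find a package; it clones the package
+// into the vendor directory.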
+func (i *InstallMissingPackagesHandler) NotFound(pkg string) (bool, error) {
+	d := &cfg.Dependency{Name: pkg}
+	dest := filepath.Join(i.Vendor, pkg)
+	msg.Info("Cloning %s into %s", pkg, dest)
+	if err := VcsGet(d, dest, i.Home, i.UseCache, i.UseCacheGopath, i.SkipGopath); err != nil {
+		return false, err
+	}
+	return true, nil
+}
+
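+// OnGopath is called when a package exists only on GOPATH; we clone a fresh copy
+// into vendor just as if it were missing.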
+func (i *InstallMissingPackagesHandler) OnGopath(pkg string) (bool, error) {
+	return i.NotFound(pkg)
+}
diff --git a/cmd/get_imports.go b/cmd/get_imports.go
index ff0d810..590ebae 100644
--- a/cmd/get_imports.go
+++ b/cmd/get_imports.go
@@ -254,6 +254,11 @@
 // VcsGet figures out how to fetch a dependency, and then gets it.
 //
 // VcsGet installs into the dest.
+//
+// dep is the dependency to install, and dest is the destination directory. home is
+// the Glide home directory, used for caching. cache indicates whether to store a
+// copy in the local cache. cacheGopath indicates whether to store a copy in GOPATH,
+// and skipGopath prevents copying from GOPATH into the present project.
 func VcsGet(dep *cfg.Dependency, dest, home string, cache, cacheGopath, skipGopath bool) error {
 	// When not skipping the $GOPATH look in it for a copy of the package
 	if !skipGopath {
diff --git a/cmd/update_references.go b/cmd/update_references.go
index 92fe235..176d972 100644
--- a/cmd/update_references.go
+++ b/cmd/update_references.go
@@ -1,10 +1,11 @@
 package cmd
 
 import (
-	"path"
+	"path/filepath"
 
 	"github.com/Masterminds/cookoo"
 	"github.com/Masterminds/glide/cfg"
+	"github.com/Masterminds/glide/dependency"
 )
 
 // UpdateReferences updates the revision numbers on all of the imports.
@@ -70,35 +71,76 @@
 
 func discoverDependencyTree(f *flattening) error {
 	Debug("---> Inspecting %s for dependencies (%d packages).\n", f.curr, len(f.scan))
-	scanned := map[string]bool{}
-	for _, imp := range f.scan {
-		Debug("----> Scanning %s", imp)
-		base := path.Join(f.top, imp)
-		mod := []string{}
-		if m, ok := mergeGlide(base, imp, f.deps, f.top); ok {
-			mod = m
-		} else if m, ok = mergeGodep(base, imp, f.deps, f.top); ok {
-			mod = m
-		} else if m, ok = mergeGPM(base, imp, f.deps, f.top); ok {
-			mod = m
-		} else if m, ok = mergeGb(base, imp, f.deps, f.top); ok {
-			mod = m
-		} else if m, ok = mergeGuess(base, imp, f.deps, f.top, scanned); ok {
-			mod = m
-		}
 
-		if len(mod) > 0 {
-			Debug("----> Looking for dependencies in %q (%d)", imp, len(mod))
-			f2 := &flattening{
-				conf: f.conf,
-				top:  f.top,
-				curr: base,
-				deps: f.deps,
-				scan: mod}
-			discoverDependencyTree(f2)
+	// projects tracks which projects we have seen. Initially it contains everything
+	// in f.deps; as we go, we merge in all of the others that we find.
+	projects := f.deps
+
+	// Get all of the packages that are used.
+	resolver, err := dependency.NewResolver(f.top)
+	if err != nil {
+		return err
+	}
+
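+	// ResolveAll expects a slice, so copy the dependency map into one.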
+	dlist := make([]*cfg.Dependency, 0, len(f.deps))
+	for _, d := range f.deps {
+		dlist = append(dlist, d)
+	}
+	pkgs, err := resolver.ResolveAll(dlist)
+	if err != nil {
+		return err
+	}
+
+	// From the packages, we just want the repositories. So we get a normalized
+	// list of dependencies.
+	for _, d := range pkgs {
+		d, err = filepath.Rel(f.top, d)
+		if err != nil {
+			Warn("Cannot resolve relative path: %s", err)
+		}
+		d, _ = NormalizeName(d)
+
+		if _, ok := projects[d]; !ok {
+			projects[d] = &cfg.Dependency{Name: d}
+			Info("====> %s", d)
 		}
 	}
 
+	// At this point, we know that we have an exhaustive list of packages, so
+	// we can now just look for files that will tell us what version of each
+	// package to use.
+
+	/*
+		scanned := map[string]bool{}
+		for _, imp := range f.scan {
+			Debug("----> Scanning %s", imp)
+			base := path.Join(f.top, imp)
+			mod := []string{}
+			if m, ok := mergeGlide(base, imp, f.deps, f.top); ok {
+				mod = m
+			} else if m, ok = mergeGodep(base, imp, f.deps, f.top); ok {
+				mod = m
+			} else if m, ok = mergeGPM(base, imp, f.deps, f.top); ok {
+				mod = m
+			} else if m, ok = mergeGb(base, imp, f.deps, f.top); ok {
+				mod = m
+			} else if m, ok = mergeGuess(base, imp, f.deps, f.top, scanned); ok {
+				mod = m
+			}
+
+			if len(mod) > 0 {
+				Debug("----> Looking for dependencies in %q (%d)", imp, len(mod))
+				f2 := &flattening{
+					conf: f.conf,
+					top:  f.top,
+					curr: base,
+					deps: f.deps,
+					scan: mod}
+				discoverDependencyTree(f2)
+			}
+		}
+	*/
+
 	return nil
 }
 
diff --git a/dependency/resolver.go b/dependency/resolver.go
index 943c3aa..120afae 100644
--- a/dependency/resolver.go
+++ b/dependency/resolver.go
@@ -137,7 +137,7 @@
 // there.
 func (r *Resolver) Resolve(pkg, basepath string) ([]string, error) {
 	target := filepath.Join(basepath, pkg)
-	//msg.Debug("Scanning %s", target)
+	msg.Info("Resolving %s", target)
 	l := list.New()
 	l.PushBack(target)
 	return r.resolveList(l)