gofmt pass
diff --git a/jpath/lexer.go b/jpath/lexer.go
index 0d19868..dc830fe 100644
--- a/jpath/lexer.go
+++ b/jpath/lexer.go
@@ -6,8 +6,8 @@
 package jpath
 
 import (
-  . "github.com/pelletier/go-toml"
 	"fmt"
+	. "github.com/pelletier/go-toml"
 	"regexp"
 	"strconv"
 	"strings"
@@ -27,42 +27,42 @@
 const (
 	tokenError tokenType = iota
 	tokenEOF
-  tokenKey
-  tokenString
-  tokenFloat
-  tokenInteger
-  tokenAtCost
-  tokenDollar
-  tokenLBracket
-  tokenRBracket
-  tokenDot
-  tokenDotDot
-  tokenStar
-  tokenComma
-  tokenColon
-  tokenQuestion
-  tokenLParen
-  tokenRParen
+	tokenKey
+	tokenString
+	tokenFloat
+	tokenInteger
+	tokenAtCost
+	tokenDollar
+	tokenLBracket
+	tokenRBracket
+	tokenDot
+	tokenDotDot
+	tokenStar
+	tokenComma
+	tokenColon
+	tokenQuestion
+	tokenLParen
+	tokenRParen
 )
 
 var tokenTypeNames = []string{
 	"EOF",
-  "Key",
-  "String",
-  "Float",
-  "Integer",
-  "@",
-  "$",
-  "[",
-  "]",
-  ".",
-  "..",
-  "*",
-  ",",
-  ":",
-  "?",
-  "(",
-  ")",
+	"Key",
+	"String",
+	"Float",
+	"Integer",
+	"@",
+	"$",
+	"[",
+	"]",
+	".",
+	"..",
+	"*",
+	",",
+	":",
+	"?",
+	"(",
+	")",
 }
 
 type token struct {
@@ -126,7 +126,7 @@
 	depth      int
 	line       int
 	col        int
-  stringTerm string
+	stringTerm string
 }
 
 func (l *lexer) run() {
@@ -221,67 +221,67 @@
 type stateFn func(*lexer) stateFn
 
 func lexVoid(l *lexer) stateFn {
-  for {
-    next := l.peek()
-    switch next {
-    case '$':
-      l.pos++
-      l.emit(tokenDollar)
-      continue
-    case '.':
-      if l.follow("..") {
-        l.pos += 2
-        l.emit(tokenDotDot)
-      } else {
-        l.pos++
-        l.emit(tokenDot)
-      }
-      continue
-    case '@':
-      l.pos++
-      l.emit(tokenAtCost)
-      continue
-    case '[':
-      l.pos++
-      l.emit(tokenLBracket)
-      continue
-    case ']':
-      l.pos++
-      l.emit(tokenRBracket)
-      continue
-    case ',':
-      l.pos++
-      l.emit(tokenComma)
-      continue
-    case '*':
-      l.pos++
-      l.emit(tokenStar)
-      continue
-    case '(':
-      l.pos++
-      l.emit(tokenLParen)
-      continue
-    case ')':
-      l.pos++
-      l.emit(tokenRParen)
-      continue
-    case '?':
-      l.pos++
-      l.emit(tokenQuestion)
-      continue
-    case ':':
-      l.pos++
-      l.emit(tokenColon)
-      continue
-    case '\'':
-      l.ignore()
-      l.stringTerm = string(next)
-      return lexString
-    case '"':
-      l.ignore()
-      l.stringTerm = string(next)
-      return lexString
-    }
+	for {
+		next := l.peek()
+		switch next {
+		case '$':
+			l.pos++
+			l.emit(tokenDollar)
+			continue
+		case '.':
+			if l.follow("..") {
+				l.pos += 2
+				l.emit(tokenDotDot)
+			} else {
+				l.pos++
+				l.emit(tokenDot)
+			}
+			continue
+		case '@':
+			l.pos++
+			l.emit(tokenAtCost)
+			continue
+		case '[':
+			l.pos++
+			l.emit(tokenLBracket)
+			continue
+		case ']':
+			l.pos++
+			l.emit(tokenRBracket)
+			continue
+		case ',':
+			l.pos++
+			l.emit(tokenComma)
+			continue
+		case '*':
+			l.pos++
+			l.emit(tokenStar)
+			continue
+		case '(':
+			l.pos++
+			l.emit(tokenLParen)
+			continue
+		case ')':
+			l.pos++
+			l.emit(tokenRParen)
+			continue
+		case '?':
+			l.pos++
+			l.emit(tokenQuestion)
+			continue
+		case ':':
+			l.pos++
+			l.emit(tokenColon)
+			continue
+		case '\'':
+			l.ignore()
+			l.stringTerm = string(next)
+			return lexString
+		case '"':
+			l.ignore()
+			l.stringTerm = string(next)
+			return lexString
+		}
 
 		if isAlphanumeric(next) {
 			return lexKey
@@ -291,7 +291,7 @@
 			return lexNumber
 		}
 
-    if isAlphanumeric(next) {
+		if isAlphanumeric(next) {
 			return lexKey
 		}
 
@@ -299,28 +299,28 @@
 			l.ignore()
 		}
 
-    if l.next() == eof {
+		if l.next() == eof {
 			break
-    }
+		}
 
-    return l.errorf("unexpected char: '%v'", next)
-  }
+		return l.errorf("unexpected char: '%v'", next)
+	}
 	l.emit(tokenEOF)
 	return nil
 }
 
 func lexKey(l *lexer) stateFn {
-  for {
-    next := l.peek()
-    if !isAlphanumeric(next) {
-      l.emit(tokenKey)
-      return lexVoid
-    }
+	for {
+		next := l.peek()
+		if !isAlphanumeric(next) {
+			l.emit(tokenKey)
+			return lexVoid
+		}
 
-    if l.next() == eof {
+		if l.next() == eof {
 			break
-    }
-  }
+		}
+	}
 	l.emit(tokenEOF)
 	return nil
 }
diff --git a/jpath/lexer_test.go b/jpath/lexer_test.go
index 8eb5fb5..5e24497 100644
--- a/jpath/lexer_test.go
+++ b/jpath/lexer_test.go
@@ -1,9 +1,8 @@
-
 package jpath
 
 import (
-  . "github.com/pelletier/go-toml"
-  "testing"
+	. "github.com/pelletier/go-toml"
+	"testing"
 )
 
 func testFlow(t *testing.T, input string, expectedFlow []token) {
diff --git a/jpath/match.go b/jpath/match.go
index cae83f3..9496168 100644
--- a/jpath/match.go
+++ b/jpath/match.go
@@ -1,7 +1,7 @@
 package jpath
 
 import (
-  . "github.com/pelletier/go-toml"
+	. "github.com/pelletier/go-toml"
 )
 
 type QueryPath []PathFn
@@ -9,107 +9,106 @@
 type PathFn func(context interface{}, next QueryPath)
 
 func (path QueryPath) Fn(context interface{}) {
-  path[0](context, path[1:])
+	path[0](context, path[1:])
 }
 
 func treeValue(tree *TomlTree, key string) interface{} {
-  return tree.GetPath([]string{key})
+	return tree.GetPath([]string{key})
 }
 
 func matchKeyFn(name string) PathFn {
-  return func(context interface{}, next QueryPath) {
-    if tree, ok := context.(*TomlTree); ok {
-      item := treeValue(tree, name)
-      if item != nil {
-        next.Fn(item)
-      }
-    }
-  }
+	return func(context interface{}, next QueryPath) {
+		if tree, ok := context.(*TomlTree); ok {
+			item := treeValue(tree, name)
+			if item != nil {
+				next.Fn(item)
+			}
+		}
+	}
 }
 
 func matchIndexFn(idx int) PathFn {
-  return func(context interface{}, next QueryPath) {
-    if arr, ok := context.([]interface{}); ok {
-      if idx < len(arr) && idx >= 0 {
-        next.Fn(arr[idx])
-      }
-    }
-  }
+	return func(context interface{}, next QueryPath) {
+		if arr, ok := context.([]interface{}); ok {
+			if idx < len(arr) && idx >= 0 {
+				next.Fn(arr[idx])
+			}
+		}
+	}
 }
 
 func matchSliceFn(start, end, step int) PathFn {
-  return func(context interface{}, next QueryPath) {
-    if arr, ok := context.([]interface{}); ok {
-      // adjust indexes for negative values, reverse ordering
-      realStart, realEnd := start, end
-      if realStart < 0 {
-        realStart = len(arr) + realStart
-      }
-      if realEnd < 0 {
-        realEnd = len(arr) + realEnd
-      }
-      if realEnd < realStart {
-        realEnd, realStart = realStart, realEnd  // swap
-      }
-      // loop and gather
-      for idx := realStart; idx < realEnd; idx += step {
-        next.Fn(arr[idx])
-      }
-    }
-  }
+	return func(context interface{}, next QueryPath) {
+		if arr, ok := context.([]interface{}); ok {
+			// adjust indexes for negative values, reverse ordering
+			realStart, realEnd := start, end
+			if realStart < 0 {
+				realStart = len(arr) + realStart
+			}
+			if realEnd < 0 {
+				realEnd = len(arr) + realEnd
+			}
+			if realEnd < realStart {
+				realEnd, realStart = realStart, realEnd // swap
+			}
+			// loop and gather
+			for idx := realStart; idx < realEnd; idx += step {
+				next.Fn(arr[idx])
+			}
+		}
+	}
 }
 
 func matchAnyFn() PathFn {
-  return func(context interface{}, next QueryPath) {
-    if tree, ok := context.(*TomlTree); ok {
-      for _, key := range tree.Keys() {
-        item := treeValue(tree, key)
-        next.Fn(item)
-      }
-    }
-  }
+	return func(context interface{}, next QueryPath) {
+		if tree, ok := context.(*TomlTree); ok {
+			for _, key := range tree.Keys() {
+				item := treeValue(tree, key)
+				next.Fn(item)
+			}
+		}
+	}
 }
 
 func matchUnionFn(union QueryPath) PathFn {
-  return func(context interface{}, next QueryPath) {
-    for _, fn := range union {
-      fn(context, next)
-    }
-  }
+	return func(context interface{}, next QueryPath) {
+		for _, fn := range union {
+			fn(context, next)
+		}
+	}
 }
 
 func matchRecurseFn() PathFn {
-  return func(context interface{}, next QueryPath) {
-    if tree, ok := context.(*TomlTree); ok {
-      var visit func(tree *TomlTree)
-      visit = func(tree *TomlTree) {
-        for _, key := range tree.Keys() {
-          item := treeValue(tree, key)
-          next.Fn(item)
-          switch node := item.(type) {
-          case *TomlTree:
-            visit(node)
-          case []*TomlTree:
-            for _, subtree := range node {
-              visit(subtree)
-            }
-          }
-        }
-      }
-      visit(tree)
-    }
-  }
+	return func(context interface{}, next QueryPath) {
+		if tree, ok := context.(*TomlTree); ok {
+			var visit func(tree *TomlTree)
+			visit = func(tree *TomlTree) {
+				for _, key := range tree.Keys() {
+					item := treeValue(tree, key)
+					next.Fn(item)
+					switch node := item.(type) {
+					case *TomlTree:
+						visit(node)
+					case []*TomlTree:
+						for _, subtree := range node {
+							visit(subtree)
+						}
+					}
+				}
+			}
+			visit(tree)
+		}
+	}
 }
 
-
 func processPath(path QueryPath, context interface{}) []interface{} {
-  // terminate the path with a collection funciton
-  result := []interface{}{}
-  newPath := append(path, func(context interface{}, next QueryPath) {
-    result = append(result, context)
-  })
+	// terminate the path with a collection function
+	result := []interface{}{}
+	newPath := append(path, func(context interface{}, next QueryPath) {
+		result = append(result, context)
+	})
 
-  // execute the path
-  newPath.Fn(context)
-  return result
+	// execute the path
+	newPath.Fn(context)
+	return result
 }
diff --git a/jpath/parser.go b/jpath/parser.go
index 33f188c..9fcd0c3 100644
--- a/jpath/parser.go
+++ b/jpath/parser.go
@@ -2,14 +2,14 @@
 
 import (
 	"fmt"
-  "math"
+	"math"
 	"strconv"
 )
 
 type parser struct {
-	flow          chan token
-	tokensBuffer  []token
-  path          []PathFn
+	flow         chan token
+	tokensBuffer []token
+	path         []PathFn
 }
 
 type parserStateFn func(*parser) parserStateFn
@@ -38,7 +38,7 @@
 	if !ok {
 		return nil
 	}
-  p.backup(&tok)
+	p.backup(&tok)
 	return &tok
 }
 
@@ -55,9 +55,8 @@
 	return &tok
 }
 
-
 func (p *parser) appendPath(fn PathFn) {
-  p.path = append(p.path, fn)
+	p.path = append(p.path, fn)
 }
 
 func parseStart(p *parser) parserStateFn {
@@ -67,29 +66,29 @@
 		return nil
 	}
 
-  if tok.typ != tokenDollar {
-    p.raiseError(tok, "Expected '$' at start of expression")
-  }
+	if tok.typ != tokenDollar {
+		p.raiseError(tok, "Expected '$' at start of expression")
+	}
 
-  return parseMatchExpr
+	return parseMatchExpr
 }
 
 func parseMatchExpr(p *parser) parserStateFn {
 	tok := p.getToken()
 	switch tok.typ {
 	case tokenDot:
-    p.appendPath(matchKeyFn(tok.val))
-    return parseMatchExpr
-  case tokenDotDot:
-    p.appendPath(matchRecurseFn())
+		p.appendPath(matchKeyFn(tok.val))
+		return parseMatchExpr
+	case tokenDotDot:
+		p.appendPath(matchRecurseFn())
 		return parseSimpleMatchExpr
 	case tokenLBracket:
 		return parseBracketExpr
-  case tokenStar:
-    p.appendPath(matchAnyFn())
-    return parseMatchExpr
-  case tokenEOF:
-    return nil  // allow EOF at this stage
+	case tokenStar:
+		p.appendPath(matchAnyFn())
+		return parseMatchExpr
+	case tokenEOF:
+		return nil // allow EOF at this stage
 	}
 	p.raiseError(tok, "expected match expression")
 	return nil
@@ -101,124 +100,124 @@
 	case tokenLBracket:
 		return parseBracketExpr
 	case tokenKey:
-    p.appendPath(matchKeyFn(tok.val))
-    return parseMatchExpr
-  case tokenStar:
-    p.appendPath(matchAnyFn())
-    return parseMatchExpr
+		p.appendPath(matchKeyFn(tok.val))
+		return parseMatchExpr
+	case tokenStar:
+		p.appendPath(matchAnyFn())
+		return parseMatchExpr
 	}
 	p.raiseError(tok, "expected match expression")
 	return nil
 }
 
 func parseBracketExpr(p *parser) parserStateFn {
-  tok := p.peek()
-  switch tok.typ {
-  case tokenInteger:
-    // look ahead for a ':'
-    p.getToken()
-    next := p.peek()
-    p.backup(tok)
-    if next.typ == tokenColon {
-      return parseSliceExpr
-    }
-    return parseUnionExpr
-  case tokenColon:
-    return parseSliceExpr
+	tok := p.peek()
+	switch tok.typ {
+	case tokenInteger:
+		// look ahead for a ':'
+		p.getToken()
+		next := p.peek()
+		p.backup(tok)
+		if next.typ == tokenColon {
+			return parseSliceExpr
+		}
+		return parseUnionExpr
+	case tokenColon:
+		return parseSliceExpr
 	}
 	return parseUnionExpr
 }
 
 func parseUnionExpr(p *parser) parserStateFn {
-  union := []PathFn{}
-  for {
-    // parse sub expression
-    tok := p.getToken()
-    switch tok.typ {
-    case tokenInteger:
-      idx, _ := strconv.Atoi(tok.val)
-      union = append(union, matchIndexFn(idx))
-    case tokenKey:
-      union = append(union, matchKeyFn(tok.val))
-    case tokenQuestion:
-      return parseFilterExpr
-    case tokenLParen:
-      return parseScriptExpr
-    default:
-      p.raiseError(tok, "expected union sub expression")
-    }
-    // parse delimiter or terminator
-    tok = p.getToken()
-    switch tok.typ {
-    case tokenComma:
-      continue
-    case tokenRBracket:
-      break
-    default:
-      p.raiseError(tok, "expected ',' or ']'")
-    }
-  }
-  p.appendPath(matchUnionFn(union))
-  return parseMatchExpr
+	union := []PathFn{}
+	for {
+		// parse sub expression
+		tok := p.getToken()
+		switch tok.typ {
+		case tokenInteger:
+			idx, _ := strconv.Atoi(tok.val)
+			union = append(union, matchIndexFn(idx))
+		case tokenKey:
+			union = append(union, matchKeyFn(tok.val))
+		case tokenQuestion:
+			return parseFilterExpr
+		case tokenLParen:
+			return parseScriptExpr
+		default:
+			p.raiseError(tok, "expected union sub expression")
+		}
+		// parse delimiter or terminator
+		tok = p.getToken()
+		switch tok.typ {
+		case tokenComma:
+			continue
+		case tokenRBracket:
+			break
+		default:
+			p.raiseError(tok, "expected ',' or ']'")
+		}
+	}
+	p.appendPath(matchUnionFn(union))
+	return parseMatchExpr
 }
 
 func parseSliceExpr(p *parser) parserStateFn {
-  // init slice to grab all elements
-  start, end, step := 0, math.MaxInt64, 1
+	// init slice to grab all elements
+	start, end, step := 0, math.MaxInt64, 1
 
-  // parse optional start
-  tok := p.getToken()
-  if tok.typ == tokenInteger {
-    start, _ = strconv.Atoi(tok.val)
-    tok = p.getToken()
-  }
-  if tok.typ != tokenColon {
-    p.raiseError(tok, "expected ':'")
-  }
+	// parse optional start
+	tok := p.getToken()
+	if tok.typ == tokenInteger {
+		start, _ = strconv.Atoi(tok.val)
+		tok = p.getToken()
+	}
+	if tok.typ != tokenColon {
+		p.raiseError(tok, "expected ':'")
+	}
 
-  // parse optional end
-  tok = p.getToken()
-  if tok.typ == tokenInteger {
-    end, _ = strconv.Atoi(tok.val)
-    tok = p.getToken()
-  }
-  if tok.typ != tokenColon || tok.typ != tokenRBracket {
-    p.raiseError(tok, "expected ']' or ':'")
-  }
+	// parse optional end
+	tok = p.getToken()
+	if tok.typ == tokenInteger {
+		end, _ = strconv.Atoi(tok.val)
+		tok = p.getToken()
+	}
+	if tok.typ != tokenColon || tok.typ != tokenRBracket {
+		p.raiseError(tok, "expected ']' or ':'")
+	}
 
-  // parse optional step
-  tok = p.getToken()
-  if tok.typ == tokenInteger {
-    step, _ = strconv.Atoi(tok.val)
-    if step < 0 {
-      p.raiseError(tok, "step must be a positive value")
-    }
-    tok = p.getToken()
-  }
-  if tok.typ != tokenRBracket {
-    p.raiseError(tok, "expected ']'")
-  }
+	// parse optional step
+	tok = p.getToken()
+	if tok.typ == tokenInteger {
+		step, _ = strconv.Atoi(tok.val)
+		if step < 0 {
+			p.raiseError(tok, "step must be a positive value")
+		}
+		tok = p.getToken()
+	}
+	if tok.typ != tokenRBracket {
+		p.raiseError(tok, "expected ']'")
+	}
 
-  p.appendPath(matchSliceFn(start, end, step))
-  return parseMatchExpr
+	p.appendPath(matchSliceFn(start, end, step))
+	return parseMatchExpr
 }
 
 func parseFilterExpr(p *parser) parserStateFn {
 	p.raiseError(p.peek(), "filter expressions are unsupported")
-  return nil
+	return nil
 }
 
 func parseScriptExpr(p *parser) parserStateFn {
 	p.raiseError(p.peek(), "script expressions are unsupported")
-  return nil
+	return nil
 }
 
 func parse(flow chan token) []PathFn {
 	result := []PathFn{}
 	parser := &parser{
-		flow:          flow,
-		tokensBuffer:  []token{},
-    path:          result,
+		flow:         flow,
+		tokensBuffer: []token{},
+		path:         result,
 	}
 	parser.run()
 	return result
diff --git a/jpath/parser_test.go b/jpath/parser_test.go
index 99bc4d0..1571f02 100644
--- a/jpath/parser_test.go
+++ b/jpath/parser_test.go
@@ -2,15 +2,15 @@
 
 import (
 	"fmt"
+	. "github.com/pelletier/go-toml"
 	"testing"
-  . "github.com/pelletier/go-toml"
 )
 
 func assertQuery(t *testing.T, toml, query string, ref []interface{}) {
 	tree, err := Load(toml)
 	if err != nil {
 		t.Errorf("Non-nil toml parse error: %v", err)
-    return
+		return
 	}
 	_, flow := lex(query)
 	if err != nil {
@@ -18,65 +18,65 @@
 		return
 	}
 	path := parse(flow)
-  result := processPath(path, tree)
-  assertValue(t, result, ref, "")
+	result := processPath(path, tree)
+	assertValue(t, result, ref, "")
 }
 
 func assertValue(t *testing.T, result, ref interface{}, location string) {
-  switch node := ref.(type) {
-  case []interface{}:
-    if resultNode, ok := result.([]interface{}); !ok {
-      t.Errorf("{%s} result value not of type %T: %T",
-        location, node, resultNode)
-    } else {
-      for i, v := range node {
-        assertValue(t, resultNode[i], v, fmt.Sprintf("%s[%d]", location, i))
-      }
-    }
-  case map[string]interface{}:
-    if resultNode, ok := result.(*TomlTree); !ok {
-      t.Errorf("{%s} result value not of type %T: %T",
-        location, node, resultNode)
-    } else {
-      for k, v := range node {
-        assertValue(t, resultNode.GetPath([]string{k}), v, location + "." + k)
-      }
-    }
-  case int64:
-    if resultNode, ok := result.(int64); !ok {
-      t.Errorf("{%s} result value not of type %T: %T",
-        location, node, resultNode)
-    } else {
-      if node != resultNode {
-        t.Errorf("{%s} result value does not match", location)
-      }
-    }
-  case string:
-    if resultNode, ok := result.(string); !ok {
-      t.Errorf("{%s} result value not of type %T: %T",
-        location, node, resultNode)
-    } else {
-      if node != resultNode {
-        t.Errorf("{%s} result value does not match", location)
-      }
-    }
-  default:
-    if fmt.Sprintf("%v", node) != fmt.Sprintf("%v", ref) {
-      t.Errorf("{%s} result value does not match: %v != %v",
-        location, node, ref)
-    }
-  }
+	switch node := ref.(type) {
+	case []interface{}:
+		if resultNode, ok := result.([]interface{}); !ok {
+			t.Errorf("{%s} result value not of type %T: %T",
+				location, node, resultNode)
+		} else {
+			for i, v := range node {
+				assertValue(t, resultNode[i], v, fmt.Sprintf("%s[%d]", location, i))
+			}
+		}
+	case map[string]interface{}:
+		if resultNode, ok := result.(*TomlTree); !ok {
+			t.Errorf("{%s} result value not of type %T: %T",
+				location, node, resultNode)
+		} else {
+			for k, v := range node {
+				assertValue(t, resultNode.GetPath([]string{k}), v, location+"."+k)
+			}
+		}
+	case int64:
+		if resultNode, ok := result.(int64); !ok {
+			t.Errorf("{%s} result value not of type %T: %T",
+				location, node, resultNode)
+		} else {
+			if node != resultNode {
+				t.Errorf("{%s} result value does not match", location)
+			}
+		}
+	case string:
+		if resultNode, ok := result.(string); !ok {
+			t.Errorf("{%s} result value not of type %T: %T",
+				location, node, resultNode)
+		} else {
+			if node != resultNode {
+				t.Errorf("{%s} result value does not match", location)
+			}
+		}
+	default:
+		if fmt.Sprintf("%v", node) != fmt.Sprintf("%v", ref) {
+			t.Errorf("{%s} result value does not match: %v != %v",
+				location, node, ref)
+		}
+	}
 }
 
 func TestQueryRoot(t *testing.T) {
-  assertQuery(t,
-    "a = 42",
-    "$",
-	  []interface{}{
-      map[string]interface{}{
-		    "a": int64(42),
-	    },
-    })
+	assertQuery(t,
+		"a = 42",
+		"$",
+		[]interface{}{
+			map[string]interface{}{
+				"a": int64(42),
+			},
+		})
 }
 
 /*