Merge pull request #23 from pelletier/jsonpath

Powerful querying interface
diff --git a/README.md b/README.md
index db8e427..509eb18 100644
--- a/README.md
+++ b/README.md
@@ -8,6 +8,23 @@
 [![GoDoc](https://godoc.org/github.com/pelletier/go-toml?status.svg)](http://godoc.org/github.com/pelletier/go-toml)
 [![Build Status](https://travis-ci.org/pelletier/go-toml.svg?branch=master)](https://travis-ci.org/pelletier/go-toml)
 
+## Features
+
+Go-toml provides the following features for using data parsed from TOML documents:
+
+* Load TOML documents from files and string data
+* Easily navigate TOML structure using TomlTree
+* Line & column position data for all parsed elements
+* Query support similar to JSON-Path
+* Syntax errors contain line and column numbers
+
+Go-toml is designed to help cover use-cases not covered by reflection-based TOML parsing:
+
+* Semantic evaluation of parsed TOML
+* Informing a user of mistakes in the source document, after it has been parsed
+* Programmatic handling of default values on a case-by-case basis
+* Using a TOML document as a flexible data-store
+
 ## Import
 
     import "github.com/pelletier/go-toml"
@@ -45,34 +62,22 @@
     user = configTree.Get("user").(string)
     password = configTree.Get("password").(string)
     fmt.Println("User is ", user, ". Password is ", password)
+
+    // show where elements are in the file
+    fmt.Printf("User position: %v\n", configTree.GetPosition("user"))
+    fmt.Printf("Password position: %v\n", configTree.GetPosition("password"))
+
+    // use a query to gather elements without walking the tree
+    results, _ := config.Query("$..[user,password]")
+    for ii, item := range results.Values() {
+      fmt.Printf("Query result %d: %v\n", ii, item)
+    }
 }
 ```
 
-### Dealing with values
-
-Here are some important functions you need to know in order to work with the
-values in a TOML tree:
-
-* `tree.Get("comma.separated.path")` Returns the value at the given path in the
-  tree as an `interface{}`. It's up to you to cast the result into the right
-  type.
-* `tree.Set("comma.separated.path", value)` Sets the value at the given path in
-  the tree, creating all necessary intermediate subtrees.
-
-### Dealing with positions
-
-Since
-[e118479061](https://github.com/pelletier/go-toml/commit/e1184790610b20d0541fc9f57c181cc5b1fc78be),
-go-toml supports positions. This feature allows you to track the positions of
-the values inside the source document, for example to provide better feedback in
-your application. Using positions works much like values:
-
-* `tree.GetPosition("comma.separated.path")` Returns the position of the given
-  path in the source.
-
 ## Documentation
 
-The documentation is available at
+The documentation and additional examples are available at
 [godoc.org](http://godoc.org/github.com/pelletier/go-toml).
 
 ## Contribute
diff --git a/doc.go b/doc.go
new file mode 100644
index 0000000..a231199
--- /dev/null
+++ b/doc.go
@@ -0,0 +1,245 @@
+// Package toml is a TOML markup language parser.
+//
+// This version supports the specification as described in
+// https://github.com/toml-lang/toml/blob/master/versions/toml-v0.2.0.md
+//
+// TOML Parsing
+//
+// TOML data may be parsed in two ways: by file, or by string.
+//
+//   // load TOML data by filename
+//   tree, err := toml.LoadFile("filename.toml")
+//
+//   // load TOML data stored in a string
+//   tree, err := toml.Load(stringContainingTomlData)
+//
+// Either way, the result is a TomlTree object that can be used to navigate the
+// structure and data within the original document.
+//
+//
+// Getting data from the TomlTree
+//
+// After parsing TOML data with Load() or LoadFile(), use the Has() and Get()
+// methods on the returned TomlTree, to find your way through the document data.
+//
+//   if tree.Has("foo") {
+//     fmt.Printf("foo is: %v\n", tree.Get("foo"))
+//   }
+//
+// Working with Paths
+//
+// Go-toml has support for basic dot-separated key paths on the Has(), Get(), Set()
+// and GetDefault() methods.  These are the same kind of key paths used within the
+// TOML specification for struct names.
+//
+//   // looks for a key named 'baz', within struct 'bar', within struct 'foo'
+//   tree.Has("foo.bar.baz")
+//
+//   // returns the key at this path, if it is there
+//   tree.Get("foo.bar.baz")
+//
+// TOML allows keys to contain '.', which can cause this syntax to be problematic
+// for some documents.  In such cases, use the GetPath(), HasPath(), and SetPath(),
+// methods to explicitly define the path.  This form is also faster, since
+// it avoids having to parse the passed key for '.' delimiters.
+//
+//   // looks for a key named 'baz', within struct 'bar', within struct 'foo'
+//   tree.HasPath([]string{"foo","bar","baz"})
+//
+//   // returns the key at this path, if it is there
+//   tree.GetPath([]string{"foo","bar","baz"})
+//
+// Note that this is distinct from the heavyweight query syntax supported by
+// TomlTree.Query() and the Query() struct (see below).
+//
+// Position Support
+//
+// Each element within the TomlTree is stored with position metadata, which is
+// invaluable for providing semantic feedback to a user.  This helps in
+// situations where the TOML file parses correctly, but contains data that is
+// not correct for the application.  In such cases, an error message can be
+// generated that indicates the problem line and column number in the source
+// TOML document.
+//
+//   // load TOML data
+//   tree, _ := toml.Load("filename.toml")
+//
+//   // get an entry and report an error if it's the wrong type
+//   element := tree.Get("foo")
+//   if value, ok := element.(int64); !ok {
+//       return fmt.Errorf("%v: Element 'foo' must be an integer", tree.GetPosition("foo"))
+//   }
+//
+//   // report an error if an expected element is missing
+//   if !tree.Has("bar") {
+//      return fmt.Errorf("%v: Expected 'bar' element", tree.GetPosition(""))
+//   }
+//
+// Query Support
+//
+// The TOML query path implementation is based loosely on the JSONPath specification:
+// http://goessner.net/articles/JsonPath/
+//
+// The idea behind a query path is to allow quick access to any element, or set
+// of elements within a TOML document, with a single expression.
+//
+//   result, _ := tree.Query("$.foo.bar.baz")  // result is 'nil' if the path is not present
+//
+// This is equivalent to:
+//
+//   next := tree.Get("foo")
+//   if next != nil {
+//     next = next.Get("bar")
+//     if next != nil {
+//       next = next.Get("baz")
+//     }
+//   }
+//   result := next
+//
+// As illustrated above, the query path is much more efficient, especially since
+// the structure of the TOML file can vary.  Rather than making assumptions about
+// a document's structure, a query allows the programmer to make structured
+// requests into the document, and get zero or more values as a result.
+//
+// The syntax of a query begins with a root token, followed by any number
+// sub-expressions:
+//
+//   $
+//                    Root of the TOML tree.  This must always come first.
+//   .name
+//                    Selects child of this node, where 'name' is a TOML key
+//                    name.
+//   ['name']
+//                    Selects child of this node, where 'name' is a string
+//                    containing a TOML key name.
+//   [index]
+//                    Selects child array element at 'index'.
+//   ..expr
+//                    Recursively selects all children, filtered by a union,
+//                    index, or slice expression.
+//   ..*
+//                    Recursive selection of all nodes at this point in the
+//                    tree.
+//   .*
+//                    Selects all children of the current node.
+//   [expr,expr]
+//                    Union operator - a logical 'or' grouping of two or more
+//                    sub-expressions: index, key name, or filter.
+//   [start:end:step]
+//                    Slice operator - selects array elements from start to
+//                    end-1, at the given step.  All three arguments are
+//                    optional.
+//   [?(filter)]
+//                    Named filter expression - the function 'filter' is
+//                    used to filter children at this node.
+//
+// Query Indexes And Slices
+//
+// Index expressions perform no bounds checking, and will contribute no
+// values to the result set if the provided index or index range is invalid.
+// Negative indexes represent values from the end of the array, counting backwards.
+//
+//   // select the last index of the array named 'foo'
+//   tree.Query("$.foo[-1]")
+//
+// Slice expressions are supported, by using ':' to separate a start/end index pair.
+//
+//   // select up to the first five elements in the array
+//   tree.Query("$.foo[0:5]")
+//
+// Slice expressions also allow negative indexes for the start and stop
+// arguments.
+//
+//   // select all array elements.
+//   tree.Query("$.foo[0:-1]")
+//
+// Slice expressions may have an optional stride/step parameter:
+//
+//   // select every other element
+//   tree.Query("$.foo[0:-1:2]")
+//
+// Slice start and end parameters are also optional:
+//
+//   // these are all equivalent and select all the values in the array
+//   tree.Query("$.foo[:]")
+//   tree.Query("$.foo[0:]")
+//   tree.Query("$.foo[:-1]")
+//   tree.Query("$.foo[0:-1:]")
+//   tree.Query("$.foo[::1]")
+//   tree.Query("$.foo[0::1]")
+//   tree.Query("$.foo[:-1:1]")
+//   tree.Query("$.foo[0:-1:1]")
+//
+// Query Filters
+//
+// Query filters are used within a Union [,] or single Filter [] expression.
+// A filter only allows nodes that qualify through to the next expression,
+// and/or into the result set.
+//
+//   // returns children of foo that are permitted by the 'bar' filter.
+//   tree.Query("$.foo[?(bar)]")
+//
+// There are several filters provided with the library:
+//
+//   tree
+//          Allows nodes of type TomlTree.
+//   int
+//          Allows nodes of type int64.
+//   float
+//          Allows nodes of type float64.
+//   string
+//          Allows nodes of type string.
+//   time
+//          Allows nodes of type time.Time.
+//   bool
+//          Allows nodes of type bool.
+//
+// Query Results
+//
+// An executed query returns a QueryResult object.  This contains the nodes
+// in the TOML tree that qualify the query expression.  Position information
+// is also available for each value in the set.
+//
+//   // display the results of a query
+//   results, _ := tree.Query("$.foo.bar.baz")
+//   for idx, value := range results.Values() {
+//       fmt.Printf("%v: %v\n", results.Positions()[idx], value)
+//   }
+//
+// Compiled Queries
+//
+// Queries may be executed directly on a TomlTree object, or compiled ahead
+// of time and executed discretely.  The former is more convenient, but has the
+// penalty of having to recompile the query expression each time.
+//
+//   // basic query
+//   results, _ := tree.Query("$.foo.bar.baz")
+//
+//   // compiled query
+//   query, _ := toml.CompileQuery("$.foo.bar.baz")
+//   results := query.Execute(tree)
+//
+//   // run the compiled query again on a different tree
+//   moreResults := query.Execute(anotherTree)
+//
+// User Defined Query Filters
+//
+// Filter expressions may also be user defined by using the SetFilter()
+// function on the Query object.  The function must return true/false, which
+// signifies if the passed node is kept or discarded, respectively.
+//
+//   // create a query that references a user-defined filter
+//   query, _ := CompileQuery("$[?(bazOnly)]")
+//
+//   // define the filter, and assign it to the query
+//   query.SetFilter("bazOnly", func(node interface{}) bool{
+//       if tree, ok := node.(*TomlTree); ok {
+//           return tree.Has("baz")
+//       }
+//       return false  // reject all other node types
+//   })
+//
+//   // run the query
+//   query.Execute(tree)
+//
+package toml
diff --git a/doc_test.go b/doc_test.go
new file mode 100644
index 0000000..7c0e677
--- /dev/null
+++ b/doc_test.go
@@ -0,0 +1,81 @@
+// code examples for godoc
+
+package toml
+
+import (
+	"fmt"
+)
+
+func ExampleNodeFilterFn_filterExample() {
+	tree, _ := Load(`
+      [struct_one]
+      foo = "foo"
+      bar = "bar"
+
+      [struct_two]
+      baz = "baz"
+      gorf = "gorf"
+    `)
+
+	// create a query that references a user-defined-filter
+	query, _ := CompileQuery("$[?(bazOnly)]")
+
+	// define the filter, and assign it to the query
+	query.SetFilter("bazOnly", func(node interface{}) bool {
+		if tree, ok := node.(*TomlTree); ok {
+			return tree.Has("baz")
+		}
+		return false // reject all other node types
+	})
+
+	// results contain only the 'struct_two' TomlTree
+	query.Execute(tree)
+}
+
+func ExampleQuery_queryExample() {
+	config, _ := Load(`
+      [[book]]
+      title = "The Stand"
+      author = "Stephen King"
+      [[book]]
+      title = "For Whom the Bell Tolls"
+      author = "Ernest Hemmingway"
+      [[book]]
+      title = "Neuromancer"
+      author = "William Gibson"
+    `)
+
+	// find and print all the authors in the document
+	authors, _ := config.Query("$.book.author")
+	for _, name := range authors.Values() {
+		fmt.Println(name)
+	}
+}
+
+func Example_comprehensiveExample() {
+	config, err := LoadFile("config.toml")
+
+	if err != nil {
+		fmt.Println("Error ", err.Error())
+	} else {
+		// retrieve data directly
+		user := config.Get("postgres.user").(string)
+		password := config.Get("postgres.password").(string)
+
+		// or using an intermediate object
+		configTree := config.Get("postgres").(*TomlTree)
+		user = configTree.Get("user").(string)
+		password = configTree.Get("password").(string)
+		fmt.Println("User is ", user, ". Password is ", password)
+
+		// show where elements are in the file
+		fmt.Println("User position: %v", configTree.GetPosition("user"))
+		fmt.Println("Password position: %v", configTree.GetPosition("password"))
+
+		// use a query to gather elements without walking the tree
+		results, _ := config.Query("$..[user,password]")
+		for ii, item := range results.Values() {
+			fmt.Println("Query result %d: %v", ii, item)
+		}
+	}
+}
diff --git a/lexer.go b/lexer.go
index b057da5..c1b4998 100644
--- a/lexer.go
+++ b/lexer.go
@@ -10,115 +10,16 @@
 	"regexp"
 	"strconv"
 	"strings"
-	"unicode"
 	"unicode/utf8"
 )
 
 var dateRegexp *regexp.Regexp
 
-// Define tokens
-type tokenType int
-
-const (
-	eof = -(iota + 1)
-)
-
-const (
-	tokenError tokenType = iota
-	tokenEOF
-	tokenComment
-	tokenKey
-	tokenEqual
-	tokenString
-	tokenInteger
-	tokenTrue
-	tokenFalse
-	tokenFloat
-	tokenLeftBracket
-	tokenRightBracket
-	tokenDoubleLeftBracket
-	tokenDoubleRightBracket
-	tokenDate
-	tokenKeyGroup
-	tokenKeyGroupArray
-	tokenComma
-	tokenEOL
-)
-
-var tokenTypeNames = []string{
-	"EOF",
-	"Comment",
-	"Key",
-	"=",
-	"\"",
-	"Integer",
-	"True",
-	"False",
-	"Float",
-	"[",
-	"[",
-	"]]",
-	"[[",
-	"Date",
-	"KeyGroup",
-	"KeyGroupArray",
-	",",
-	"EOL",
-}
-
-type token struct {
-	Position
-	typ tokenType
-	val string
-}
-
-func (tt tokenType) String() string {
-	idx := int(tt)
-	if idx < len(tokenTypeNames) {
-		return tokenTypeNames[idx]
-	}
-	return "Unknown"
-}
-
-func (i token) String() string {
-	switch i.typ {
-	case tokenEOF:
-		return "EOF"
-	case tokenError:
-		return i.val
-	}
-
-	if len(i.val) > 10 {
-		return fmt.Sprintf("%.10q...", i.val)
-	}
-	return fmt.Sprintf("%q", i.val)
-}
-
-func isSpace(r rune) bool {
-	return r == ' ' || r == '\t'
-}
-
-func isAlphanumeric(r rune) bool {
-	return unicode.IsLetter(r) || r == '_'
-}
-
-func isKeyChar(r rune) bool {
-	// "Keys start with the first non-whitespace character and end with the last
-	// non-whitespace character before the equals sign."
-	return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '=')
-}
-
-func isDigit(r rune) bool {
-	return unicode.IsNumber(r)
-}
-
-func isHexDigit(r rune) bool {
-	return isDigit(r) ||
-		r == 'A' || r == 'B' || r == 'C' || r == 'D' || r == 'E' || r == 'F'
-}
+// Define state functions
+type tomlLexStateFn func() tomlLexStateFn
 
 // Define lexer
-type lexer struct {
+type tomlLexer struct {
 	input  string
 	start  int
 	pos    int
@@ -129,14 +30,14 @@
 	col    int
 }
 
-func (l *lexer) run() {
-	for state := lexVoid; state != nil; {
-		state = state(l)
+func (l *tomlLexer) run() {
+	for state := l.lexVoid; state != nil; {
+		state = state()
 	}
 	close(l.tokens)
 }
 
-func (l *lexer) nextStart() {
+func (l *tomlLexer) nextStart() {
 	// iterate by runes (utf8 characters)
 	// search for newlines and advance line/col counts
 	for i := l.start; i < l.pos; {
@@ -153,7 +54,7 @@
 	l.start = l.pos
 }
 
-func (l *lexer) emit(t tokenType) {
+func (l *tomlLexer) emit(t tokenType) {
 	l.tokens <- token{
 		Position: Position{l.line, l.col},
 		typ:      t,
@@ -162,7 +63,7 @@
 	l.nextStart()
 }
 
-func (l *lexer) emitWithValue(t tokenType, value string) {
+func (l *tomlLexer) emitWithValue(t tokenType, value string) {
 	l.tokens <- token{
 		Position: Position{l.line, l.col},
 		typ:      t,
@@ -171,7 +72,7 @@
 	l.nextStart()
 }
 
-func (l *lexer) next() rune {
+func (l *tomlLexer) next() rune {
 	if l.pos >= len(l.input) {
 		l.width = 0
 		return eof
@@ -182,15 +83,15 @@
 	return r
 }
 
-func (l *lexer) ignore() {
+func (l *tomlLexer) ignore() {
 	l.nextStart()
 }
 
-func (l *lexer) backup() {
+func (l *tomlLexer) backup() {
 	l.pos -= l.width
 }
 
-func (l *lexer) errorf(format string, args ...interface{}) stateFn {
+func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
 	l.tokens <- token{
 		Position: Position{l.line, l.col},
 		typ:      tokenError,
@@ -199,13 +100,13 @@
 	return nil
 }
 
-func (l *lexer) peek() rune {
+func (l *tomlLexer) peek() rune {
 	r := l.next()
 	l.backup()
 	return r
 }
 
-func (l *lexer) accept(valid string) bool {
+func (l *tomlLexer) accept(valid string) bool {
 	if strings.IndexRune(valid, l.next()) >= 0 {
 		return true
 	}
@@ -213,23 +114,20 @@
 	return false
 }
 
-func (l *lexer) follow(next string) bool {
+func (l *tomlLexer) follow(next string) bool {
 	return strings.HasPrefix(l.input[l.pos:], next)
 }
 
-// Define state functions
-type stateFn func(*lexer) stateFn
-
-func lexVoid(l *lexer) stateFn {
+func (l *tomlLexer) lexVoid() tomlLexStateFn {
 	for {
 		next := l.peek()
 		switch next {
 		case '[':
-			return lexKeyGroup
+			return l.lexKeyGroup
 		case '#':
-			return lexComment
+			return l.lexComment
 		case '=':
-			return lexEqual
+			return l.lexEqual
 		}
 
 		if isSpace(next) {
@@ -237,11 +135,11 @@
 		}
 
 		if l.depth > 0 {
-			return lexRvalue
+			return l.lexRvalue
 		}
 
 		if isKeyChar(next) {
-			return lexKey
+			return l.lexKey
 		}
 
 		if l.next() == eof {
@@ -253,7 +151,7 @@
 	return nil
 }
 
-func lexRvalue(l *lexer) stateFn {
+func (l *tomlLexer) lexRvalue() tomlLexStateFn {
 	for {
 		next := l.peek()
 		switch next {
@@ -263,43 +161,43 @@
 			return l.errorf("cannot have multiple equals for the same key")
 		case '[':
 			l.depth++
-			return lexLeftBracket
+			return l.lexLeftBracket
 		case ']':
 			l.depth--
-			return lexRightBracket
+			return l.lexRightBracket
 		case '#':
-			return lexComment
+			return l.lexComment
 		case '"':
-			return lexString
+			return l.lexString
 		case ',':
-			return lexComma
+			return l.lexComma
 		case '\n':
 			l.ignore()
 			l.pos++
 			if l.depth == 0 {
-				return lexVoid
+				return l.lexVoid
 			}
-			return lexRvalue
+			return l.lexRvalue
 		}
 
 		if l.follow("true") {
-			return lexTrue
+			return l.lexTrue
 		}
 
 		if l.follow("false") {
-			return lexFalse
+			return l.lexFalse
 		}
 
 		if isAlphanumeric(next) {
-			return lexKey
+			return l.lexKey
 		}
 
 		if dateRegexp.FindString(l.input[l.pos:]) != "" {
-			return lexDate
+			return l.lexDate
 		}
 
 		if next == '+' || next == '-' || isDigit(next) {
-			return lexNumber
+			return l.lexNumber
 		}
 
 		if isSpace(next) {
@@ -315,51 +213,51 @@
 	return nil
 }
 
-func lexDate(l *lexer) stateFn {
+func (l *tomlLexer) lexDate() tomlLexStateFn {
 	l.ignore()
 	l.pos += 20 // Fixed size of a date in TOML
 	l.emit(tokenDate)
-	return lexRvalue
+	return l.lexRvalue
 }
 
-func lexTrue(l *lexer) stateFn {
+func (l *tomlLexer) lexTrue() tomlLexStateFn {
 	l.ignore()
 	l.pos += 4
 	l.emit(tokenTrue)
-	return lexRvalue
+	return l.lexRvalue
 }
 
-func lexFalse(l *lexer) stateFn {
+func (l *tomlLexer) lexFalse() tomlLexStateFn {
 	l.ignore()
 	l.pos += 5
 	l.emit(tokenFalse)
-	return lexRvalue
+	return l.lexRvalue
 }
 
-func lexEqual(l *lexer) stateFn {
+func (l *tomlLexer) lexEqual() tomlLexStateFn {
 	l.ignore()
 	l.accept("=")
 	l.emit(tokenEqual)
-	return lexRvalue
+	return l.lexRvalue
 }
 
-func lexComma(l *lexer) stateFn {
+func (l *tomlLexer) lexComma() tomlLexStateFn {
 	l.ignore()
 	l.accept(",")
 	l.emit(tokenComma)
-	return lexRvalue
+	return l.lexRvalue
 }
 
-func lexKey(l *lexer) stateFn {
+func (l *tomlLexer) lexKey() tomlLexStateFn {
 	l.ignore()
 	for isKeyChar(l.next()) {
 	}
 	l.backup()
 	l.emit(tokenKey)
-	return lexVoid
+	return l.lexVoid
 }
 
-func lexComment(l *lexer) stateFn {
+func (l *tomlLexer) lexComment() tomlLexStateFn {
 	for {
 		next := l.next()
 		if next == '\n' || next == eof {
@@ -367,17 +265,17 @@
 		}
 	}
 	l.ignore()
-	return lexVoid
+	return l.lexVoid
 }
 
-func lexLeftBracket(l *lexer) stateFn {
+func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
 	l.ignore()
 	l.pos++
 	l.emit(tokenLeftBracket)
-	return lexRvalue
+	return l.lexRvalue
 }
 
-func lexString(l *lexer) stateFn {
+func (l *tomlLexer) lexString() tomlLexStateFn {
 	l.pos++
 	l.ignore()
 	growingString := ""
@@ -387,7 +285,7 @@
 			l.emitWithValue(tokenString, growingString)
 			l.pos++
 			l.ignore()
-			return lexRvalue
+			return l.lexRvalue
 		}
 
 		if l.follow("\\\"") {
@@ -446,7 +344,7 @@
 	return l.errorf("unclosed string")
 }
 
-func lexKeyGroup(l *lexer) stateFn {
+func (l *tomlLexer) lexKeyGroup() tomlLexStateFn {
 	l.ignore()
 	l.pos++
 
@@ -454,14 +352,14 @@
 		// token '[[' signifies an array of anonymous key groups
 		l.pos++
 		l.emit(tokenDoubleLeftBracket)
-		return lexInsideKeyGroupArray
+		return l.lexInsideKeyGroupArray
 	}
 	// vanilla key group
 	l.emit(tokenLeftBracket)
-	return lexInsideKeyGroup
+	return l.lexInsideKeyGroup
 }
 
-func lexInsideKeyGroupArray(l *lexer) stateFn {
+func (l *tomlLexer) lexInsideKeyGroupArray() tomlLexStateFn {
 	for {
 		if l.peek() == ']' {
 			if l.pos > l.start {
@@ -474,7 +372,7 @@
 			}
 			l.pos++
 			l.emit(tokenDoubleRightBracket)
-			return lexVoid
+			return l.lexVoid
 		} else if l.peek() == '[' {
 			return l.errorf("group name cannot contain ']'")
 		}
@@ -486,7 +384,7 @@
 	return l.errorf("unclosed key group array")
 }
 
-func lexInsideKeyGroup(l *lexer) stateFn {
+func (l *tomlLexer) lexInsideKeyGroup() tomlLexStateFn {
 	for {
 		if l.peek() == ']' {
 			if l.pos > l.start {
@@ -495,7 +393,7 @@
 			l.ignore()
 			l.pos++
 			l.emit(tokenRightBracket)
-			return lexVoid
+			return l.lexVoid
 		} else if l.peek() == '[' {
 			return l.errorf("group name cannot contain ']'")
 		}
@@ -507,14 +405,14 @@
 	return l.errorf("unclosed key group")
 }
 
-func lexRightBracket(l *lexer) stateFn {
+func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
 	l.ignore()
 	l.pos++
 	l.emit(tokenRightBracket)
-	return lexRvalue
+	return l.lexRvalue
 }
 
-func lexNumber(l *lexer) stateFn {
+func (l *tomlLexer) lexNumber() tomlLexStateFn {
 	l.ignore()
 	if !l.accept("+") {
 		l.accept("-")
@@ -550,7 +448,7 @@
 	} else {
 		l.emit(tokenInteger)
 	}
-	return lexRvalue
+	return l.lexRvalue
 }
 
 func init() {
@@ -558,13 +456,13 @@
 }
 
 // Entry point
-func lex(input string) (*lexer, chan token) {
-	l := &lexer{
+func lexToml(input string) chan token {
+	l := &tomlLexer{
 		input:  input,
 		tokens: make(chan token),
 		line:   1,
 		col:    1,
 	}
 	go l.run()
-	return l, l.tokens
+	return l.tokens
 }
diff --git a/lexer_test.go b/lexer_test.go
index 20483d7..5114223 100644
--- a/lexer_test.go
+++ b/lexer_test.go
@@ -3,7 +3,7 @@
 import "testing"
 
 func testFlow(t *testing.T, input string, expectedFlow []token) {
-	_, ch := lex(input)
+	ch := lexToml(input)
 	for _, expected := range expectedFlow {
 		token := <-ch
 		if token != expected {
diff --git a/match.go b/match.go
new file mode 100644
index 0000000..423a869
--- /dev/null
+++ b/match.go
@@ -0,0 +1,227 @@
+package toml
+
+import (
+	"fmt"
+)
+
+// support function to set positions for tomlValues
+// NOTE: this is done to allow ctx.lastPosition to indicate the start of any
+// values returned by the query engines
+func tomlValueCheck(node interface{}, ctx *queryContext) interface{} {
+	switch castNode := node.(type) {
+	case *tomlValue:
+		ctx.lastPosition = castNode.position
+		return castNode.value
+	case []*TomlTree:
+		if len(castNode) > 0 {
+			ctx.lastPosition = castNode[0].position
+		}
+		return node
+	default:
+		return node
+	}
+}
+
+// base match
+type matchBase struct {
+	next pathFn
+}
+
+func (f *matchBase) setNext(next pathFn) {
+	f.next = next
+}
+
+// terminating functor - gathers results
+type terminatingFn struct {
+	// empty
+}
+
+func newTerminatingFn() *terminatingFn {
+	return &terminatingFn{}
+}
+
+func (f *terminatingFn) setNext(next pathFn) {
+	// do nothing
+}
+
+func (f *terminatingFn) call(node interface{}, ctx *queryContext) {
+	switch castNode := node.(type) {
+	case *TomlTree:
+		ctx.result.appendResult(node, castNode.position)
+	case *tomlValue:
+		ctx.result.appendResult(node, castNode.position)
+	default:
+		// use last position for scalars
+		ctx.result.appendResult(node, ctx.lastPosition)
+	}
+}
+
+// match single key
+type matchKeyFn struct {
+	matchBase
+	Name string
+}
+
+func newMatchKeyFn(name string) *matchKeyFn {
+	return &matchKeyFn{Name: name}
+}
+
+func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
+	if tree, ok := node.(*TomlTree); ok {
+		item := tree.values[f.Name]
+		if item != nil {
+			f.next.call(item, ctx)
+		}
+	}
+}
+
+// match single index
+type matchIndexFn struct {
+	matchBase
+	Idx int
+}
+
+func newMatchIndexFn(idx int) *matchIndexFn {
+	return &matchIndexFn{Idx: idx}
+}
+
+func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
+	if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
+		if f.Idx < len(arr) && f.Idx >= 0 {
+			f.next.call(arr[f.Idx], ctx)
+		}
+	}
+}
+
+// filter by slicing
+type matchSliceFn struct {
+	matchBase
+	Start, End, Step int
+}
+
+func newMatchSliceFn(start, end, step int) *matchSliceFn {
+	return &matchSliceFn{Start: start, End: end, Step: step}
+}
+
+func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
+	if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
+		// adjust indexes for negative values, reverse ordering
+		realStart, realEnd := f.Start, f.End
+		if realStart < 0 {
+			realStart = len(arr) + realStart
+		}
+		if realEnd < 0 {
+			realEnd = len(arr) + realEnd
+		}
+		if realEnd < realStart {
+			realEnd, realStart = realStart, realEnd // swap
+		}
+		// loop and gather
+		for idx := realStart; idx < realEnd; idx += f.Step {
+			f.next.call(arr[idx], ctx)
+		}
+	}
+}
+
+// match anything
+type matchAnyFn struct {
+	matchBase
+}
+
+func newMatchAnyFn() *matchAnyFn {
+	return &matchAnyFn{}
+}
+
+func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
+	if tree, ok := node.(*TomlTree); ok {
+		for _, v := range tree.values {
+			f.next.call(v, ctx)
+		}
+	}
+}
+
+// filter through union
+type matchUnionFn struct {
+	Union []pathFn
+}
+
+func (f *matchUnionFn) setNext(next pathFn) {
+	for _, fn := range f.Union {
+		fn.setNext(next)
+	}
+}
+
+func (f *matchUnionFn) call(node interface{}, ctx *queryContext) {
+	for _, fn := range f.Union {
+		fn.call(node, ctx)
+	}
+}
+
+// match every single last node in the tree
+type matchRecursiveFn struct {
+	matchBase
+}
+
+func newMatchRecursiveFn() *matchRecursiveFn {
+	return &matchRecursiveFn{}
+}
+
+func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
+	if tree, ok := node.(*TomlTree); ok {
+		var visit func(tree *TomlTree)
+		visit = func(tree *TomlTree) {
+			for _, v := range tree.values {
+				f.next.call(v, ctx)
+				switch node := v.(type) {
+				case *TomlTree:
+					visit(node)
+				case []*TomlTree:
+					for _, subtree := range node {
+						visit(subtree)
+					}
+				}
+			}
+		}
+		f.next.call(tree, ctx)
+		visit(tree)
+	}
+}
+
+// match based on an externally provided functional filter
+type matchFilterFn struct {
+	matchBase
+	Pos  Position
+	Name string
+}
+
+func newMatchFilterFn(name string, pos Position) *matchFilterFn {
+	return &matchFilterFn{Name: name, Pos: pos}
+}
+
+func (f *matchFilterFn) call(node interface{}, ctx *queryContext) {
+	fn, ok := (*ctx.filters)[f.Name]
+	if !ok {
+		panic(fmt.Sprintf("%s: query context does not have filter '%s'",
+			f.Pos, f.Name))
+	}
+	switch castNode := tomlValueCheck(node, ctx).(type) {
+	case *TomlTree:
+		for _, v := range castNode.values {
+			if tv, ok := v.(*tomlValue); ok {
+				if fn(tv.value) {
+					f.next.call(v, ctx)
+				}
+			} else {
+				if fn(v) {
+					f.next.call(v, ctx)
+				}
+			}
+		}
+	case []interface{}:
+		for _, v := range castNode {
+			if fn(v) {
+				f.next.call(v, ctx)
+			}
+		}
+	}
+}
diff --git a/match_test.go b/match_test.go
new file mode 100644
index 0000000..c594eb5
--- /dev/null
+++ b/match_test.go
@@ -0,0 +1,202 @@
+package toml
+
+import (
+	"fmt"
+	"math"
+	"testing"
+)
+
+// dump path tree to a string
+func pathString(root pathFn) string {
+	result := fmt.Sprintf("%T:", root)
+	switch fn := root.(type) {
+	case *terminatingFn:
+		result += "{}"
+	case *matchKeyFn:
+		result += fmt.Sprintf("{%s}", fn.Name)
+		result += pathString(fn.next)
+	case *matchIndexFn:
+		result += fmt.Sprintf("{%d}", fn.Idx)
+		result += pathString(fn.next)
+	case *matchSliceFn:
+		result += fmt.Sprintf("{%d:%d:%d}",
+			fn.Start, fn.End, fn.Step)
+		result += pathString(fn.next)
+	case *matchAnyFn:
+		result += "{}"
+		result += pathString(fn.next)
+	case *matchUnionFn:
+		result += "{["
+		for _, v := range fn.Union {
+			result += pathString(v) + ", "
+		}
+		result += "]}"
+	case *matchRecursiveFn:
+		result += "{}"
+		result += pathString(fn.next)
+	case *matchFilterFn:
+		result += fmt.Sprintf("{%s}", fn.Name)
+		result += pathString(fn.next)
+	}
+	return result
+}
+
+func assertPathMatch(t *testing.T, path, ref *Query) bool {
+	pathStr := pathString(path.root)
+	refStr := pathString(ref.root)
+	if pathStr != refStr {
+		t.Errorf("paths do not match")
+		t.Log("test:", pathStr)
+		t.Log("ref: ", refStr)
+		return false
+	}
+	return true
+}
+
+func assertPath(t *testing.T, query string, ref *Query) {
+	path, _ := parseQuery(lexQuery(query))
+	assertPathMatch(t, path, ref)
+}
+
+func buildPath(parts ...pathFn) *Query {
+	query := newQuery()
+	for _, v := range parts {
+		query.appendPath(v)
+	}
+	return query
+}
+
+func TestPathRoot(t *testing.T) {
+	assertPath(t,
+		"$",
+		buildPath(
+		// empty
+		))
+}
+
+func TestPathKey(t *testing.T) {
+	assertPath(t,
+		"$.foo",
+		buildPath(
+			newMatchKeyFn("foo"),
+		))
+}
+
+func TestPathBracketKey(t *testing.T) {
+	assertPath(t,
+		"$[foo]",
+		buildPath(
+			newMatchKeyFn("foo"),
+		))
+}
+
+func TestPathBracketStringKey(t *testing.T) {
+	assertPath(t,
+		"$['foo']",
+		buildPath(
+			newMatchKeyFn("foo"),
+		))
+}
+
+func TestPathIndex(t *testing.T) {
+	assertPath(t,
+		"$[123]",
+		buildPath(
+			newMatchIndexFn(123),
+		))
+}
+
+func TestPathSliceStart(t *testing.T) {
+	assertPath(t,
+		"$[123:]",
+		buildPath(
+			newMatchSliceFn(123, math.MaxInt64, 1),
+		))
+}
+
+func TestPathSliceStartEnd(t *testing.T) {
+	assertPath(t,
+		"$[123:456]",
+		buildPath(
+			newMatchSliceFn(123, 456, 1),
+		))
+}
+
+func TestPathSliceStartEndColon(t *testing.T) {
+	assertPath(t,
+		"$[123:456:]",
+		buildPath(
+			newMatchSliceFn(123, 456, 1),
+		))
+}
+
+func TestPathSliceStartStep(t *testing.T) {
+	assertPath(t,
+		"$[123::7]",
+		buildPath(
+			newMatchSliceFn(123, math.MaxInt64, 7),
+		))
+}
+
+func TestPathSliceEndStep(t *testing.T) {
+	assertPath(t,
+		"$[:456:7]",
+		buildPath(
+			newMatchSliceFn(0, 456, 7),
+		))
+}
+
+func TestPathSliceStep(t *testing.T) {
+	assertPath(t,
+		"$[::7]",
+		buildPath(
+			newMatchSliceFn(0, math.MaxInt64, 7),
+		))
+}
+
+func TestPathSliceAll(t *testing.T) {
+	assertPath(t,
+		"$[123:456:7]",
+		buildPath(
+			newMatchSliceFn(123, 456, 7),
+		))
+}
+
+func TestPathAny(t *testing.T) {
+	assertPath(t,
+		"$.*",
+		buildPath(
+			newMatchAnyFn(),
+		))
+}
+
+func TestPathUnion(t *testing.T) {
+	assertPath(t,
+		"$[foo, bar, baz]",
+		buildPath(
+			&matchUnionFn{[]pathFn{
+				newMatchKeyFn("foo"),
+				newMatchKeyFn("bar"),
+				newMatchKeyFn("baz"),
+			}},
+		))
+}
+
+func TestPathRecurse(t *testing.T) {
+	assertPath(t,
+		"$..*",
+		buildPath(
+			newMatchRecursiveFn(),
+		))
+}
+
+func TestPathFilterExpr(t *testing.T) {
+	assertPath(t,
+		"$[?('foo'),?(bar)]",
+		buildPath(
+			&matchUnionFn{[]pathFn{
+				newMatchFilterFn("foo", Position{}),
+				newMatchFilterFn("bar", Position{}),
+			}},
+		))
+}
diff --git a/parser.go b/parser.go
index c9f42a3..60918cc 100644
--- a/parser.go
+++ b/parser.go
@@ -10,7 +10,7 @@
 	"time"
 )
 
-type parser struct {
+type tomlParser struct {
 	flow          chan token
 	tree          *TomlTree
 	tokensBuffer  []token
@@ -18,20 +18,20 @@
 	seenGroupKeys []string
 }
 
-type parserStateFn func(*parser) parserStateFn
+type tomlParserStateFn func() tomlParserStateFn
 
 // Formats and panics an error message based on a token
-func (p *parser) raiseError(tok *token, msg string, args ...interface{}) {
+func (p *tomlParser) raiseError(tok *token, msg string, args ...interface{}) {
 	panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...))
 }
 
-func (p *parser) run() {
-	for state := parseStart; state != nil; {
-		state = state(p)
+func (p *tomlParser) run() {
+	for state := p.parseStart; state != nil; {
+		state = state()
 	}
 }
 
-func (p *parser) peek() *token {
+func (p *tomlParser) peek() *token {
 	if len(p.tokensBuffer) != 0 {
 		return &(p.tokensBuffer[0])
 	}
@@ -44,7 +44,7 @@
 	return &tok
 }
 
-func (p *parser) assume(typ tokenType) {
+func (p *tomlParser) assume(typ tokenType) {
 	tok := p.getToken()
 	if tok == nil {
 		p.raiseError(tok, "was expecting token %s, but token stream is empty", tok)
@@ -54,7 +54,7 @@
 	}
 }
 
-func (p *parser) getToken() *token {
+func (p *tomlParser) getToken() *token {
 	if len(p.tokensBuffer) != 0 {
 		tok := p.tokensBuffer[0]
 		p.tokensBuffer = p.tokensBuffer[1:]
@@ -67,12 +67,9 @@
 	return &tok
 }
 
-func parseStart(p *parser) parserStateFn {
+func (p *tomlParser) parseStart() tomlParserStateFn {
 	tok := p.peek()
 
-	// prime position data with root tree instance
-	p.tree.position = tok.Position
-
 	// end of stream, parsing is finished
 	if tok == nil {
 		return nil
@@ -80,11 +77,11 @@
 
 	switch tok.typ {
 	case tokenDoubleLeftBracket:
-		return parseGroupArray
+		return p.parseGroupArray
 	case tokenLeftBracket:
-		return parseGroup
+		return p.parseGroup
 	case tokenKey:
-		return parseAssign
+		return p.parseAssign
 	case tokenEOF:
 		return nil
 	default:
@@ -93,7 +90,7 @@
 	return nil
 }
 
-func parseGroupArray(p *parser) parserStateFn {
+func (p *tomlParser) parseGroupArray() tomlParserStateFn {
 	startToken := p.getToken() // discard the [[
 	key := p.getToken()
 	if key.typ != tokenKeyGroupArray {
@@ -102,7 +99,7 @@
 
 	// get or create group array element at the indicated part in the path
 	keys := strings.Split(key.val, ".")
-	p.tree.createSubTree(keys[:len(keys)-1]) // create parent entries
+	p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
 	destTree := p.tree.GetPath(keys)
 	var array []*TomlTree
 	if destTree == nil {
@@ -140,10 +137,10 @@
 
 	// move to next parser state
 	p.assume(tokenDoubleRightBracket)
-	return parseStart(p)
+	return p.parseStart
 }
 
-func parseGroup(p *parser) parserStateFn {
+func (p *tomlParser) parseGroup() tomlParserStateFn {
 	startToken := p.getToken() // discard the [
 	key := p.getToken()
 	if key.typ != tokenKeyGroup {
@@ -157,20 +154,18 @@
 
 	p.seenGroupKeys = append(p.seenGroupKeys, key.val)
 	keys := strings.Split(key.val, ".")
-	if err := p.tree.createSubTree(keys); err != nil {
+	if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
 		p.raiseError(key, "%s", err)
 	}
 	p.assume(tokenRightBracket)
 	p.currentGroup = keys
-	targetTree := p.tree.GetPath(p.currentGroup).(*TomlTree)
-	targetTree.position = startToken.Position
-	return parseStart(p)
+	return p.parseStart
 }
 
-func parseAssign(p *parser) parserStateFn {
+func (p *tomlParser) parseAssign() tomlParserStateFn {
 	key := p.getToken()
 	p.assume(tokenEqual)
-	value := parseRvalue(p)
+	value := p.parseRvalue()
 	var groupKey []string
 	if len(p.currentGroup) > 0 {
 		groupKey = p.currentGroup
@@ -198,10 +193,10 @@
 			strings.Join(finalKey, "."))
 	}
 	targetNode.values[key.val] = &tomlValue{value, key.Position}
-	return parseStart(p)
+	return p.parseStart
 }
 
-func parseRvalue(p *parser) interface{} {
+func (p *tomlParser) parseRvalue() interface{} {
 	tok := p.getToken()
 	if tok == nil || tok.typ == tokenEOF {
 		p.raiseError(tok, "expecting a value")
@@ -233,7 +228,7 @@
 		}
 		return val
 	case tokenLeftBracket:
-		return parseArray(p)
+		return p.parseArray()
 	case tokenError:
 		p.raiseError(tok, "%s", tok)
 	}
@@ -243,7 +238,7 @@
 	return nil
 }
 
-func parseArray(p *parser) []interface{} {
+func (p *tomlParser) parseArray() []interface{} {
 	var array []interface{}
 	arrayType := reflect.TypeOf(nil)
 	for {
@@ -255,7 +250,7 @@
 			p.getToken()
 			return array
 		}
-		val := parseRvalue(p)
+		val := p.parseRvalue()
 		if arrayType == nil {
 			arrayType = reflect.TypeOf(val)
 		}
@@ -277,9 +272,10 @@
 	return array
 }
 
-func parse(flow chan token) *TomlTree {
+func parseToml(flow chan token) *TomlTree {
 	result := newTomlTree()
-	parser := &parser{
+	result.position = Position{1, 1}
+	parser := &tomlParser{
 		flow:          flow,
 		tree:          result,
 		tokensBuffer:  make([]token, 0),
diff --git a/parser_test.go b/parser_test.go
index 761a181..8160905 100644
--- a/parser_test.go
+++ b/parser_test.go
@@ -31,7 +31,7 @@
 
 func TestCreateSubTree(t *testing.T) {
 	tree := newTomlTree()
-	tree.createSubTree([]string{"a", "b", "c"})
+	tree.createSubTree([]string{"a", "b", "c"}, Position{})
 	tree.Set("a.b.c", 42)
 	if tree.Get("a.b.c") != 42 {
 		t.Fail()
@@ -385,7 +385,7 @@
 	for path, pos := range ref {
 		testPos := tree.GetPosition(path)
 		if testPos.Invalid() {
-			t.Errorf("Failed to query tree path: %s", path)
+			t.Errorf("Failed to query tree path or path has invalid position: %s", path)
 		} else if pos != testPos {
 			t.Errorf("Expected position %v, got %v instead", pos, testPos)
 		}
@@ -396,6 +396,7 @@
 	assertPosition(t,
 		"[foo]\nbar=42\nbaz=69",
 		map[string]Position{
+			"":        Position{1, 1},
 			"foo":     Position{1, 1},
 			"foo.bar": Position{2, 1},
 			"foo.baz": Position{3, 1},
@@ -406,6 +407,7 @@
 	assertPosition(t,
 		"  [foo]\n  bar=42\n  baz=69",
 		map[string]Position{
+			"":        Position{1, 1},
 			"foo":     Position{1, 3},
 			"foo.bar": Position{2, 3},
 			"foo.baz": Position{3, 3},
@@ -416,20 +418,21 @@
 	assertPosition(t,
 		"[[foo]]\nbar=42\nbaz=69",
 		map[string]Position{
+			"":        Position{1, 1},
 			"foo":     Position{1, 1},
 			"foo.bar": Position{2, 1},
 			"foo.baz": Position{3, 1},
 		})
 }
 
-func TestDocumentPositionsEmptyPath(t *testing.T) {
-	text := "[foo]\nbar=42\nbaz=69"
-	tree, err := Load(text)
-	if err != nil {
-		t.Errorf("Error loading document text: `%v`", text)
-		t.Errorf("Error: %v", err)
-	}
-	if pos := tree.GetPosition(""); !pos.Invalid() {
-		t.Errorf("Valid position was returned for empty path")
-	}
+func TestNestedTreePosition(t *testing.T) {
+	assertPosition(t,
+		"[foo.bar]\na=42\nb=69",
+		map[string]Position{
+			"":          Position{1, 1},
+			"foo":       Position{1, 1},
+			"foo.bar":   Position{1, 1},
+			"foo.bar.a": Position{2, 1},
+			"foo.bar.b": Position{3, 1},
+		})
 }
diff --git a/position.go b/position.go
index fd42a0d..1f26245 100644
--- a/position.go
+++ b/position.go
@@ -6,7 +6,13 @@
 	"fmt"
 )
 
-// Position within a TOML document
+/*
+  Position of a document element within a TOML document.
+
+  Line and Col are both 1-indexed positions for the element's line number and
  column number, respectively.  Values of zero or less will cause Invalid()
+  to return true.
+*/
 type Position struct {
 	Line int // line within the document
 	Col  int // column within the line
@@ -18,7 +24,7 @@
 	return fmt.Sprintf("(%d, %d)", p.Line, p.Col)
 }
 
-// Invalid returns wheter or not the position is valid (i.e. with negative or
+// Invalid returns whether or not the position is invalid (i.e. with negative or
 // null values)
 func (p *Position) Invalid() bool {
 	return p.Line <= 0 || p.Col <= 0
diff --git a/query.go b/query.go
new file mode 100644
index 0000000..597f4ff
--- /dev/null
+++ b/query.go
@@ -0,0 +1,142 @@
+package toml
+
+import (
+	"time"
+)
+
+//  Type of a user-defined filter function, for use with Query.SetFilter().
+//
+//  The return value of the function must indicate if 'node' is to be included
+//  at this stage of the TOML path.  Returning true will include the node, and
+//  returning false will exclude it.
+//
+//  NOTE: Care should be taken to write filter callbacks such that they are safe
+//  to use from multiple goroutines.
+type NodeFilterFn func(node interface{}) bool
+
+// The result of Executing a Query
+type QueryResult struct {
+	items     []interface{}
+	positions []Position
+}
+
+// appends a value/position pair to the result set
+func (r *QueryResult) appendResult(node interface{}, pos Position) {
+	r.items = append(r.items, node)
+	r.positions = append(r.positions, pos)
+}
+
+// Set of values within a QueryResult.  The order of values is not guaranteed
+// to be in document order, and may be different each time a query is executed.
+func (r *QueryResult) Values() []interface{} {
+	return r.items
+}
+
+// Set of positions for values within a QueryResult.  Each index in Positions()
+// corresponds to the entry in Values() of the same index.
+func (r *QueryResult) Positions() []Position {
+	return r.positions
+}
+
+// runtime context for executing query paths
+type queryContext struct {
+	result       *QueryResult
+	filters      *map[string]NodeFilterFn
+	lastPosition Position
+}
+
+// generic path functor interface
+type pathFn interface {
+	setNext(next pathFn)
+	call(node interface{}, ctx *queryContext)
+}
+
+// A Query is the representation of a compiled TOML path.  A Query is safe
+// for concurrent use by multiple goroutines.
+type Query struct {
+	root    pathFn
+	tail    pathFn
+	filters *map[string]NodeFilterFn
+}
+
+func newQuery() *Query {
+	return &Query{
+		root:    nil,
+		tail:    nil,
+		filters: &defaultFilterFunctions,
+	}
+}
+
+func (q *Query) appendPath(next pathFn) {
+	if q.root == nil {
+		q.root = next
+	} else {
+		q.tail.setNext(next)
+	}
+	q.tail = next
+	next.setNext(newTerminatingFn()) // init the next functor
+}
+
+// Compiles a TOML path expression.  The returned Query can be used to match
+// elements within a TomlTree and its descendants.
+func CompileQuery(path string) (*Query, error) {
+	return parseQuery(lexQuery(path))
+}
+
+// Executes a query against a TomlTree, and returns the result of the query.
+func (q *Query) Execute(tree *TomlTree) *QueryResult {
+	result := &QueryResult{
+		items:     []interface{}{},
+		positions: []Position{},
+	}
+	if q.root == nil {
+		result.appendResult(tree, tree.GetPosition(""))
+	} else {
+		ctx := &queryContext{
+			result:  result,
+			filters: q.filters,
+		}
+		q.root.call(tree, ctx)
+	}
+	return result
+}
+
+// Sets a user-defined filter function.  These may be used inside "?(..)" query
+// expressions to filter TOML document elements within a query.
+func (q *Query) SetFilter(name string, fn NodeFilterFn) {
+	if q.filters == &defaultFilterFunctions {
+		// clone the static table
+		q.filters = &map[string]NodeFilterFn{}
+		for k, v := range defaultFilterFunctions {
+			(*q.filters)[k] = v
+		}
+	}
+	(*q.filters)[name] = fn
+}
+
+var defaultFilterFunctions = map[string]NodeFilterFn{
+	"tree": func(node interface{}) bool {
+		_, ok := node.(*TomlTree)
+		return ok
+	},
+	"int": func(node interface{}) bool {
+		_, ok := node.(int64)
+		return ok
+	},
+	"float": func(node interface{}) bool {
+		_, ok := node.(float64)
+		return ok
+	},
+	"string": func(node interface{}) bool {
+		_, ok := node.(string)
+		return ok
+	},
+	"time": func(node interface{}) bool {
+		_, ok := node.(time.Time)
+		return ok
+	},
+	"bool": func(node interface{}) bool {
+		_, ok := node.(bool)
+		return ok
+	},
+}
diff --git a/querylexer.go b/querylexer.go
new file mode 100644
index 0000000..1532e1d
--- /dev/null
+++ b/querylexer.go
@@ -0,0 +1,339 @@
+// TOML JSONPath lexer.
+//
+// Written using the principles developed by Rob Pike in
+// http://www.youtube.com/watch?v=HxaD_trXwRE
+
+package toml
+
+import (
+	"fmt"
+	"strconv"
+	"strings"
+	"unicode/utf8"
+)
+
+// Lexer state function
+type queryLexStateFn func() queryLexStateFn
+
+// Lexer definition
+type queryLexer struct {
+	input      string
+	start      int
+	pos        int
+	width      int
+	tokens     chan token
+	depth      int
+	line       int
+	col        int
+	stringTerm string
+}
+
+func (l *queryLexer) run() {
+	for state := l.lexVoid; state != nil; {
+		state = state()
+	}
+	close(l.tokens)
+}
+
+func (l *queryLexer) nextStart() {
+	// iterate by runes (utf8 characters)
+	// search for newlines and advance line/col counts
+	for i := l.start; i < l.pos; {
+		r, width := utf8.DecodeRuneInString(l.input[i:])
+		if r == '\n' {
+			l.line++
+			l.col = 1
+		} else {
+			l.col++
+		}
+		i += width
+	}
+	// advance start position to next token
+	l.start = l.pos
+}
+
+func (l *queryLexer) emit(t tokenType) {
+	l.tokens <- token{
+		Position: Position{l.line, l.col},
+		typ:      t,
+		val:      l.input[l.start:l.pos],
+	}
+	l.nextStart()
+}
+
+func (l *queryLexer) emitWithValue(t tokenType, value string) {
+	l.tokens <- token{
+		Position: Position{l.line, l.col},
+		typ:      t,
+		val:      value,
+	}
+	l.nextStart()
+}
+
+func (l *queryLexer) next() rune {
+	if l.pos >= len(l.input) {
+		l.width = 0
+		return eof
+	}
+	var r rune
+	r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
+	l.pos += l.width
+	return r
+}
+
+func (l *queryLexer) ignore() {
+	l.nextStart()
+}
+
+func (l *queryLexer) backup() {
+	l.pos -= l.width
+}
+
+func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
+	l.tokens <- token{
+		Position: Position{l.line, l.col},
+		typ:      tokenError,
+		val:      fmt.Sprintf(format, args...),
+	}
+	return nil
+}
+
+func (l *queryLexer) peek() rune {
+	r := l.next()
+	l.backup()
+	return r
+}
+
+func (l *queryLexer) accept(valid string) bool {
+	if strings.IndexRune(valid, l.next()) >= 0 {
+		return true
+	}
+	l.backup()
+	return false
+}
+
+func (l *queryLexer) follow(next string) bool {
+	return strings.HasPrefix(l.input[l.pos:], next)
+}
+
+func (l *queryLexer) lexVoid() queryLexStateFn {
+	for {
+		next := l.peek()
+		switch next {
+		case '$':
+			l.pos++
+			l.emit(tokenDollar)
+			continue
+		case '.':
+			if l.follow("..") {
+				l.pos += 2
+				l.emit(tokenDotDot)
+			} else {
+				l.pos++
+				l.emit(tokenDot)
+			}
+			continue
+		case '[':
+			l.pos++
+			l.emit(tokenLeftBracket)
+			continue
+		case ']':
+			l.pos++
+			l.emit(tokenRightBracket)
+			continue
+		case ',':
+			l.pos++
+			l.emit(tokenComma)
+			continue
+		case '*':
+			l.pos++
+			l.emit(tokenStar)
+			continue
+		case '(':
+			l.pos++
+			l.emit(tokenLeftParen)
+			continue
+		case ')':
+			l.pos++
+			l.emit(tokenRightParen)
+			continue
+		case '?':
+			l.pos++
+			l.emit(tokenQuestion)
+			continue
+		case ':':
+			l.pos++
+			l.emit(tokenColon)
+			continue
+		case '\'':
+			l.ignore()
+			l.stringTerm = string(next)
+			return l.lexString
+		case '"':
+			l.ignore()
+			l.stringTerm = string(next)
+			return l.lexString
+		}
+
+		if isSpace(next) {
+			l.next()
+			l.ignore()
+			continue
+		}
+
+		if isAlphanumeric(next) {
+			return l.lexKey
+		}
+
+		if next == '+' || next == '-' || isDigit(next) {
+			return l.lexNumber
+		}
+
+		if l.next() == eof {
+			break
+		}
+
+		return l.errorf("unexpected char: '%v'", next)
+	}
+	l.emit(tokenEOF)
+	return nil
+}
+
+func (l *queryLexer) lexKey() queryLexStateFn {
+	for {
+		next := l.peek()
+		if !isAlphanumeric(next) {
+			l.emit(tokenKey)
+			return l.lexVoid
+		}
+
+		if l.next() == eof {
+			break
+		}
+	}
+	l.emit(tokenEOF)
+	return nil
+}
+
+func (l *queryLexer) lexString() queryLexStateFn {
+	l.pos++
+	l.ignore()
+	growingString := ""
+
+	for {
+		if l.follow(l.stringTerm) {
+			l.emitWithValue(tokenString, growingString)
+			l.pos++
+			l.ignore()
+			return l.lexVoid
+		}
+
+		if l.follow("\\\"") {
+			l.pos++
+			growingString += "\""
+		} else if l.follow("\\'") {
+			l.pos++
+			growingString += "'"
+		} else if l.follow("\\n") {
+			l.pos++
+			growingString += "\n"
+		} else if l.follow("\\b") {
+			l.pos++
+			growingString += "\b"
+		} else if l.follow("\\f") {
+			l.pos++
+			growingString += "\f"
+		} else if l.follow("\\/") {
+			l.pos++
+			growingString += "/"
+		} else if l.follow("\\t") {
+			l.pos++
+			growingString += "\t"
+		} else if l.follow("\\r") {
+			l.pos++
+			growingString += "\r"
+		} else if l.follow("\\\\") {
+			l.pos++
+			growingString += "\\"
+		} else if l.follow("\\u") {
+			l.pos += 2
+			code := ""
+			for i := 0; i < 4; i++ {
+				c := l.peek()
+				l.pos++
+				if !isHexDigit(c) {
+					return l.errorf("unfinished unicode escape")
+				}
+				code = code + string(c)
+			}
+			l.pos--
+			intcode, err := strconv.ParseInt(code, 16, 32)
+			if err != nil {
+				return l.errorf("invalid unicode escape: \\u" + code)
+			}
+			growingString += string(rune(intcode))
+		} else if l.follow("\\") {
+			l.pos++
+			return l.errorf("invalid escape sequence: \\" + string(l.peek()))
+		} else {
+			growingString += string(l.peek())
+		}
+
+		if l.next() == eof {
+			break
+		}
+	}
+
+	return l.errorf("unclosed string")
+}
+
+func (l *queryLexer) lexNumber() queryLexStateFn {
+	l.ignore()
+	if !l.accept("+") {
+		l.accept("-")
+	}
+	pointSeen := false
+	digitSeen := false
+	for {
+		next := l.next()
+		if next == '.' {
+			if pointSeen {
+				return l.errorf("cannot have two dots in one float")
+			}
+			if !isDigit(l.peek()) {
+				return l.errorf("float cannot end with a dot")
+			}
+			pointSeen = true
+		} else if isDigit(next) {
+			digitSeen = true
+		} else {
+			l.backup()
+			break
+		}
+		if pointSeen && !digitSeen {
+			return l.errorf("cannot start float with a dot")
+		}
+	}
+
+	if !digitSeen {
+		return l.errorf("no digit in that number")
+	}
+	if pointSeen {
+		l.emit(tokenFloat)
+	} else {
+		l.emit(tokenInteger)
+	}
+	return l.lexVoid
+}
+
+// Entry point
+func lexQuery(input string) chan token {
+	l := &queryLexer{
+		input:  input,
+		tokens: make(chan token),
+		line:   1,
+		col:    1,
+	}
+	go l.run()
+	return l.tokens
+}
diff --git a/querylexer_test.go b/querylexer_test.go
new file mode 100644
index 0000000..a9bd674
--- /dev/null
+++ b/querylexer_test.go
@@ -0,0 +1,97 @@
+package toml
+
+import (
+	"testing"
+)
+
+func testQLFlow(t *testing.T, input string, expectedFlow []token) {
+	ch := lexQuery(input)
+	for idx, expected := range expectedFlow {
+		token := <-ch
+		if token != expected {
+			t.Log("While testing #", idx, ":", input)
+			t.Log("compared", token, "to", expected)
+			t.Log(token.val, "<->", expected.val)
+			t.Log(token.typ, "<->", expected.typ)
+			t.Log(token.Line, "<->", expected.Line)
+			t.Log(token.Col, "<->", expected.Col)
+			t.FailNow()
+		}
+	}
+
+	tok, ok := <-ch
+	if ok {
+		t.Log("channel is not closed!")
+		t.Log(len(ch)+1, "tokens remaining:")
+
+		t.Log("token ->", tok)
+		for token := range ch {
+			t.Log("token ->", token)
+		}
+		t.FailNow()
+	}
+}
+
+func TestLexSpecialChars(t *testing.T) {
+	testQLFlow(t, " .$[]..()?*", []token{
+		token{Position{1, 2}, tokenDot, "."},
+		token{Position{1, 3}, tokenDollar, "$"},
+		token{Position{1, 4}, tokenLeftBracket, "["},
+		token{Position{1, 5}, tokenRightBracket, "]"},
+		token{Position{1, 6}, tokenDotDot, ".."},
+		token{Position{1, 8}, tokenLeftParen, "("},
+		token{Position{1, 9}, tokenRightParen, ")"},
+		token{Position{1, 10}, tokenQuestion, "?"},
+		token{Position{1, 11}, tokenStar, "*"},
+		token{Position{1, 12}, tokenEOF, ""},
+	})
+}
+
+func TestLexString(t *testing.T) {
+	testQLFlow(t, "'foo'", []token{
+		token{Position{1, 2}, tokenString, "foo"},
+		token{Position{1, 6}, tokenEOF, ""},
+	})
+}
+
+func TestLexDoubleString(t *testing.T) {
+	testQLFlow(t, `"bar"`, []token{
+		token{Position{1, 2}, tokenString, "bar"},
+		token{Position{1, 6}, tokenEOF, ""},
+	})
+}
+
+func TestLexKey(t *testing.T) {
+	testQLFlow(t, "foo", []token{
+		token{Position{1, 1}, tokenKey, "foo"},
+		token{Position{1, 4}, tokenEOF, ""},
+	})
+}
+
+func TestLexRecurse(t *testing.T) {
+	testQLFlow(t, "$..*", []token{
+		token{Position{1, 1}, tokenDollar, "$"},
+		token{Position{1, 2}, tokenDotDot, ".."},
+		token{Position{1, 4}, tokenStar, "*"},
+		token{Position{1, 5}, tokenEOF, ""},
+	})
+}
+
+func TestLexBracketKey(t *testing.T) {
+	testQLFlow(t, "$[foo]", []token{
+		token{Position{1, 1}, tokenDollar, "$"},
+		token{Position{1, 2}, tokenLeftBracket, "["},
+		token{Position{1, 3}, tokenKey, "foo"},
+		token{Position{1, 6}, tokenRightBracket, "]"},
+		token{Position{1, 7}, tokenEOF, ""},
+	})
+}
+
+func TestLexSpace(t *testing.T) {
+	testQLFlow(t, "foo bar baz", []token{
+		token{Position{1, 1}, tokenKey, "foo"},
+		token{Position{1, 5}, tokenKey, "bar"},
+		token{Position{1, 9}, tokenKey, "baz"},
+		token{Position{1, 12}, tokenEOF, ""},
+	})
+}
diff --git a/queryparser.go b/queryparser.go
new file mode 100644
index 0000000..be6954f
--- /dev/null
+++ b/queryparser.go
@@ -0,0 +1,275 @@
+/*
+  Based on the "jsonpath" spec/concept.
+
+  http://goessner.net/articles/JsonPath/
+  https://code.google.com/p/json-path/
+*/
+
+package toml
+
+import (
+	"fmt"
+	"math"
+)
+
+type queryParser struct {
+	flow         chan token
+	tokensBuffer []token
+	query        *Query
+	union        []pathFn
+	err          error
+}
+
+type queryParserStateFn func() queryParserStateFn
+
+// Formats and panics an error message based on a token
+func (p *queryParser) parseError(tok *token, msg string, args ...interface{}) queryParserStateFn {
+	p.err = fmt.Errorf(tok.Position.String()+": "+msg, args...)
+	return nil // trigger parse to end
+}
+
+func (p *queryParser) run() {
+	for state := p.parseStart; state != nil; {
+		state = state()
+	}
+}
+
+func (p *queryParser) backup(tok *token) {
+	p.tokensBuffer = append(p.tokensBuffer, *tok)
+}
+
+func (p *queryParser) peek() *token {
+	if len(p.tokensBuffer) != 0 {
+		return &(p.tokensBuffer[0])
+	}
+
+	tok, ok := <-p.flow
+	if !ok {
+		return nil
+	}
+	p.backup(&tok)
+	return &tok
+}
+
+func (p *queryParser) lookahead(types ...tokenType) bool {
+	result := true
+	buffer := []token{}
+
+	for _, typ := range types {
+		tok := p.getToken()
+		if tok == nil {
+			result = false
+			break
+		}
+		buffer = append(buffer, *tok)
+		if tok.typ != typ {
+			result = false
+			break
+		}
+	}
+	// add the tokens back to the buffer, and return
+	p.tokensBuffer = append(p.tokensBuffer, buffer...)
+	return result
+}
+
+func (p *queryParser) getToken() *token {
+	if len(p.tokensBuffer) != 0 {
+		tok := p.tokensBuffer[0]
+		p.tokensBuffer = p.tokensBuffer[1:]
+		return &tok
+	}
+	tok, ok := <-p.flow
+	if !ok {
+		return nil
+	}
+	return &tok
+}
+
+func (p *queryParser) parseStart() queryParserStateFn {
+	tok := p.getToken()
+
+	if tok == nil || tok.typ == tokenEOF {
+		return nil
+	}
+
+	if tok.typ != tokenDollar {
+		return p.parseError(tok, "Expected '$' at start of expression")
+	}
+
+	return p.parseMatchExpr
+}
+
+// handle '.' prefix, '[]', and '..'
+func (p *queryParser) parseMatchExpr() queryParserStateFn {
+	tok := p.getToken()
+	switch tok.typ {
+	case tokenDotDot:
+		p.query.appendPath(&matchRecursiveFn{})
+		// nested parse for '..'
+		tok := p.getToken()
+		switch tok.typ {
+		case tokenKey:
+			p.query.appendPath(newMatchKeyFn(tok.val))
+			return p.parseMatchExpr
+		case tokenLeftBracket:
+			return p.parseBracketExpr
+		case tokenStar:
+			// do nothing - the recursive predicate is enough
+			return p.parseMatchExpr
+		}
+
+	case tokenDot:
+		// nested parse for '.'
+		tok := p.getToken()
+		switch tok.typ {
+		case tokenKey:
+			p.query.appendPath(newMatchKeyFn(tok.val))
+			return p.parseMatchExpr
+		case tokenStar:
+			p.query.appendPath(&matchAnyFn{})
+			return p.parseMatchExpr
+		}
+
+	case tokenLeftBracket:
+		return p.parseBracketExpr
+
+	case tokenEOF:
+		return nil // allow EOF at this stage
+	}
+	return p.parseError(tok, "expected match expression")
+	return nil
+}
+
+func (p *queryParser) parseBracketExpr() queryParserStateFn {
+	if p.lookahead(tokenInteger, tokenColon) {
+		return p.parseSliceExpr
+	}
+	if p.peek().typ == tokenColon {
+		return p.parseSliceExpr
+	}
+	return p.parseUnionExpr
+}
+
+func (p *queryParser) parseUnionExpr() queryParserStateFn {
+	var tok *token
+
+	// this state can be traversed after some sub-expressions
+	// so be careful when setting up state in the parser
+	if p.union == nil {
+		p.union = []pathFn{}
+	}
+
+loop: // labeled loop for easy breaking
+	for {
+		if len(p.union) > 0 {
+			// parse delimiter or terminator
+			tok = p.getToken()
+			switch tok.typ {
+			case tokenComma:
+				// do nothing
+			case tokenRightBracket:
+				break loop
+			default:
+				return p.parseError(tok, "expected ',' or ']', not '%s'", tok.val)
+			}
+		}
+
+		// parse sub expression
+		tok = p.getToken()
+		switch tok.typ {
+		case tokenInteger:
+			p.union = append(p.union, newMatchIndexFn(tok.Int()))
+		case tokenKey:
+			p.union = append(p.union, newMatchKeyFn(tok.val))
+		case tokenString:
+			p.union = append(p.union, newMatchKeyFn(tok.val))
+		case tokenQuestion:
+			return p.parseFilterExpr
+		default:
+			return p.parseError(tok, "expected union sub expression, not '%s', %d", tok.val, len(p.union))
+		}
+	}
+
+	// if there is only one sub-expression, use that instead
+	if len(p.union) == 1 {
+		p.query.appendPath(p.union[0])
+	} else {
+		p.query.appendPath(&matchUnionFn{p.union})
+	}
+
+	p.union = nil // clear out state
+	return p.parseMatchExpr
+}
+
+func (p *queryParser) parseSliceExpr() queryParserStateFn {
+	// init slice to grab all elements
+	start, end, step := 0, math.MaxInt64, 1
+
+	// parse optional start
+	tok := p.getToken()
+	if tok.typ == tokenInteger {
+		start = tok.Int()
+		tok = p.getToken()
+	}
+	if tok.typ != tokenColon {
+		return p.parseError(tok, "expected ':'")
+	}
+
+	// parse optional end
+	tok = p.getToken()
+	if tok.typ == tokenInteger {
+		end = tok.Int()
+		tok = p.getToken()
+	}
+	if tok.typ == tokenRightBracket {
+		p.query.appendPath(newMatchSliceFn(start, end, step))
+		return p.parseMatchExpr
+	}
+	if tok.typ != tokenColon {
+		return p.parseError(tok, "expected ']' or ':'")
+	}
+
+	// parse optional step
+	tok = p.getToken()
+	if tok.typ == tokenInteger {
+		step = tok.Int()
+		if step < 0 {
+			return p.parseError(tok, "step must be a positive value")
+		}
+		tok = p.getToken()
+	}
+	if tok.typ != tokenRightBracket {
+		return p.parseError(tok, "expected ']'")
+	}
+
+	p.query.appendPath(newMatchSliceFn(start, end, step))
+	return p.parseMatchExpr
+}
+
+func (p *queryParser) parseFilterExpr() queryParserStateFn {
+	tok := p.getToken()
+	if tok.typ != tokenLeftParen {
+		return p.parseError(tok, "expected left-parenthesis for filter expression")
+	}
+	tok = p.getToken()
+	if tok.typ != tokenKey && tok.typ != tokenString {
+		return p.parseError(tok, "expected key or string for filter function name")
+	}
+	name := tok.val
+	tok = p.getToken()
+	if tok.typ != tokenRightParen {
+		return p.parseError(tok, "expected right-parenthesis for filter expression")
+	}
+	p.union = append(p.union, newMatchFilterFn(name, tok.Position))
+	return p.parseUnionExpr
+}
+
+func parseQuery(flow chan token) (*Query, error) {
+	parser := &queryParser{
+		flow:         flow,
+		tokensBuffer: []token{},
+		query:        newQuery(),
+	}
+	parser.run()
+	return parser.query, parser.err
+}
diff --git a/queryparser_test.go b/queryparser_test.go
new file mode 100644
index 0000000..b2b85ce
--- /dev/null
+++ b/queryparser_test.go
@@ -0,0 +1,483 @@
+package toml
+
+import (
+	"fmt"
+	"io/ioutil"
+	"sort"
+	"strings"
+	"testing"
+	"time"
+)
+
+// queryTestNode pairs an expected value with the source position it should
+// carry; it is the reference element type used by the query tests below.
+type queryTestNode struct {
+	value    interface{}
+	position Position
+}
+
+// valueString renders any value reachable from a query result — trees, maps,
+// slices, scalars, or the reference queryTestNode — as a canonical string so
+// expected and actual result sets can be compared textually. Collections are
+// sorted before joining to neutralize Go's randomized map iteration order.
+func valueString(root interface{}) string {
+	result := "" //fmt.Sprintf("%T:", root)
+	switch node := root.(type) {
+	case *tomlValue:
+		return valueString(node.value)
+	case *QueryResult:
+		// pair each value with its recorded position, then sort for determinism
+		items := []string{}
+		for i, v := range node.Values() {
+			items = append(items, fmt.Sprintf("%s:%s",
+				node.Positions()[i].String(), valueString(v)))
+		}
+		sort.Strings(items)
+		result = "[" + strings.Join(items, ", ") + "]"
+	case queryTestNode:
+		result = fmt.Sprintf("%s:%s",
+			node.position.String(), valueString(node.value))
+	case []interface{}:
+		items := []string{}
+		for _, v := range node {
+			items = append(items, valueString(v))
+		}
+		sort.Strings(items)
+		result = "[" + strings.Join(items, ", ") + "]"
+	case *TomlTree:
+		// workaround for unreliable map key ordering
+		items := []string{}
+		for _, k := range node.Keys() {
+			v := node.GetPath([]string{k})
+			items = append(items, k+":"+valueString(v))
+		}
+		sort.Strings(items)
+		result = "{" + strings.Join(items, ", ") + "}"
+	case map[string]interface{}:
+		// workaround for unreliable map key ordering
+		items := []string{}
+		for k, v := range node {
+			items = append(items, k+":"+valueString(v))
+		}
+		sort.Strings(items)
+		result = "{" + strings.Join(items, ", ") + "}"
+	case int64:
+		result += fmt.Sprintf("%d", node)
+	case string:
+		result += "'" + node + "'"
+	case float64:
+		result += fmt.Sprintf("%f", node)
+	case bool:
+		result += fmt.Sprintf("%t", node)
+	case time.Time:
+		result += fmt.Sprintf("'%v'", node)
+	}
+	return result
+}
+
+// assertValue compares result and ref via their canonical string renderings
+// (see valueString) and reports a test error, logging both forms, on mismatch.
+func assertValue(t *testing.T, result, ref interface{}) {
+	pathStr := valueString(result)
+	refStr := valueString(ref)
+	if pathStr != refStr {
+		t.Errorf("values do not match")
+		t.Log("test:", pathStr)
+		t.Log("ref: ", refStr)
+	}
+}
+
+// assertQueryPositions loads the given TOML document, compiles and executes
+// the query against it, and checks the results (values and their positions)
+// against the expected reference list.
+func assertQueryPositions(t *testing.T, toml, query string, ref []interface{}) {
+	tree, err := Load(toml)
+	if err != nil {
+		t.Errorf("Non-nil toml parse error: %v", err)
+		return
+	}
+	q, err := CompileQuery(query)
+	if err != nil {
+		t.Error(err)
+		return
+	}
+	results := q.Execute(tree)
+	assertValue(t, results, ref)
+}
+
+// TestQueryRoot checks that "$" matches the whole document at position 1,1.
+func TestQueryRoot(t *testing.T) {
+	assertQueryPositions(t,
+		"a = 42",
+		"$",
+		[]interface{}{
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(42),
+				}, Position{1, 1},
+			},
+		})
+}
+
+// TestQueryKey checks dotted-key navigation ("$.foo.a").
+func TestQueryKey(t *testing.T) {
+	assertQueryPositions(t,
+		"[foo]\na = 42",
+		"$.foo.a",
+		[]interface{}{
+			queryTestNode{
+				int64(42), Position{2, 1},
+			},
+		})
+}
+
+// TestQueryKeyString checks bracketed quoted-key navigation ("$.foo['a']").
+func TestQueryKeyString(t *testing.T) {
+	assertQueryPositions(t,
+		"[foo]\na = 42",
+		"$.foo['a']",
+		[]interface{}{
+			queryTestNode{
+				int64(42), Position{2, 1},
+			},
+		})
+}
+
+// TestQueryIndex checks single-index selection on an array element.
+func TestQueryIndex(t *testing.T) {
+	assertQueryPositions(t,
+		"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
+		"$.foo.a[5]",
+		[]interface{}{
+			queryTestNode{
+				int64(6), Position{2, 1},
+			},
+		})
+}
+
+// TestQuerySliceRange checks half-open [start:end) slice selection:
+// [0:5] over a ten-element array yields the first five elements.
+func TestQuerySliceRange(t *testing.T) {
+	assertQueryPositions(t,
+		"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
+		"$.foo.a[0:5]",
+		[]interface{}{
+			queryTestNode{
+				int64(1), Position{2, 1},
+			},
+			queryTestNode{
+				int64(2), Position{2, 1},
+			},
+			queryTestNode{
+				int64(3), Position{2, 1},
+			},
+			queryTestNode{
+				int64(4), Position{2, 1},
+			},
+			queryTestNode{
+				int64(5), Position{2, 1},
+			},
+		})
+}
+
+// TestQuerySliceStep checks the stepped slice form: [0:5:2] yields every
+// second element of the first five.
+func TestQuerySliceStep(t *testing.T) {
+	assertQueryPositions(t,
+		"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
+		"$.foo.a[0:5:2]",
+		[]interface{}{
+			queryTestNode{
+				int64(1), Position{2, 1},
+			},
+			queryTestNode{
+				int64(3), Position{2, 1},
+			},
+			queryTestNode{
+				int64(5), Position{2, 1},
+			},
+		})
+}
+
+// TestQueryAny checks that the '*' wildcard matches every child of a table.
+func TestQueryAny(t *testing.T) {
+	assertQueryPositions(t,
+		"[foo.bar]\na=1\nb=2\n[foo.baz]\na=3\nb=4",
+		"$.foo.*",
+		[]interface{}{
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(1),
+					"b": int64(2),
+				}, Position{1, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(3),
+					"b": int64(4),
+				}, Position{4, 1},
+			},
+		})
+}
+// TestQueryUnionSimple checks a union selector ([bar,foo]) applied beneath a
+// wildcard step.
+func TestQueryUnionSimple(t *testing.T) {
+	assertQueryPositions(t,
+		"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
+		"$.*[bar,foo]",
+		[]interface{}{
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(1),
+					"b": int64(2),
+				}, Position{1, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(3),
+					"b": int64(4),
+				}, Position{4, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(5),
+					"b": int64(6),
+				}, Position{7, 1},
+			},
+		})
+}
+
+// TestQueryRecursionAll checks recursive descent with a wildcard ("$..*"):
+// every node of the document, from the root down to each leaf value, must be
+// matched exactly once with its own position.
+func TestQueryRecursionAll(t *testing.T) {
+	assertQueryPositions(t,
+		"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
+		"$..*",
+		[]interface{}{
+			queryTestNode{
+				map[string]interface{}{
+					"foo": map[string]interface{}{
+						"bar": map[string]interface{}{
+							"a": int64(1),
+							"b": int64(2),
+						},
+					},
+					"baz": map[string]interface{}{
+						"foo": map[string]interface{}{
+							"a": int64(3),
+							"b": int64(4),
+						},
+					},
+					"gorf": map[string]interface{}{
+						"foo": map[string]interface{}{
+							"a": int64(5),
+							"b": int64(6),
+						},
+					},
+				}, Position{1, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"bar": map[string]interface{}{
+						"a": int64(1),
+						"b": int64(2),
+					},
+				}, Position{1, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(1),
+					"b": int64(2),
+				}, Position{1, 1},
+			},
+			queryTestNode{
+				int64(1), Position{2, 1},
+			},
+			queryTestNode{
+				int64(2), Position{3, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"foo": map[string]interface{}{
+						"a": int64(3),
+						"b": int64(4),
+					},
+				}, Position{4, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(3),
+					"b": int64(4),
+				}, Position{4, 1},
+			},
+			queryTestNode{
+				int64(3), Position{5, 1},
+			},
+			queryTestNode{
+				int64(4), Position{6, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"foo": map[string]interface{}{
+						"a": int64(5),
+						"b": int64(6),
+					},
+				}, Position{7, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(5),
+					"b": int64(6),
+				}, Position{7, 1},
+			},
+			queryTestNode{
+				int64(5), Position{8, 1},
+			},
+			queryTestNode{
+				int64(6), Position{9, 1},
+			},
+		})
+}
+
+// TestQueryRecursionUnionSimple checks recursive descent combined with a
+// quoted-key union ("$..['foo','bar']") — each 'foo' or 'bar' table anywhere
+// in the document must match.
+func TestQueryRecursionUnionSimple(t *testing.T) {
+	assertQueryPositions(t,
+		"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
+		"$..['foo','bar']",
+		[]interface{}{
+			queryTestNode{
+				map[string]interface{}{
+					"bar": map[string]interface{}{
+						"a": int64(1),
+						"b": int64(2),
+					},
+				}, Position{1, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(3),
+					"b": int64(4),
+				}, Position{4, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(1),
+					"b": int64(2),
+				}, Position{1, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"a": int64(5),
+					"b": int64(6),
+				}, Position{7, 1},
+			},
+		})
+}
+
+// TestQueryFilterFn checks the built-in type filters — ?(int), ?(string),
+// ?(float), ?(tree), ?(time), ?(bool) — applied with recursive descent over
+// the repository's example.toml fixture.
+func TestQueryFilterFn(t *testing.T) {
+	buff, err := ioutil.ReadFile("example.toml")
+	if err != nil {
+		t.Error(err)
+		return
+	}
+
+	// integer values only
+	assertQueryPositions(t, string(buff),
+		"$..[?(int)]",
+		[]interface{}{
+			queryTestNode{
+				int64(8001), Position{13, 1},
+			},
+			queryTestNode{
+				int64(8001), Position{13, 1},
+			},
+			queryTestNode{
+				int64(8002), Position{13, 1},
+			},
+			queryTestNode{
+				int64(5000), Position{14, 1},
+			},
+		})
+
+	// string values only
+	assertQueryPositions(t, string(buff),
+		"$..[?(string)]",
+		[]interface{}{
+			queryTestNode{
+				"TOML Example", Position{3, 1},
+			},
+			queryTestNode{
+				"Tom Preston-Werner", Position{6, 1},
+			},
+			queryTestNode{
+				"GitHub", Position{7, 1},
+			},
+			queryTestNode{
+				"GitHub Cofounder & CEO\nLikes tater tots and beer.",
+				Position{8, 1},
+			},
+			queryTestNode{
+				"192.168.1.1", Position{12, 1},
+			},
+			queryTestNode{
+				"10.0.0.1", Position{21, 3},
+			},
+			queryTestNode{
+				"eqdc10", Position{22, 3},
+			},
+			queryTestNode{
+				"10.0.0.2", Position{25, 3},
+			},
+			queryTestNode{
+				"eqdc10", Position{26, 3},
+			},
+		})
+
+	assertQueryPositions(t, string(buff),
+		"$..[?(float)]",
+		[]interface{}{
+		// no float values in document
+		})
+
+	// sub-tables only
+	tv, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
+	assertQueryPositions(t, string(buff),
+		"$..[?(tree)]",
+		[]interface{}{
+			queryTestNode{
+				map[string]interface{}{
+					"name":         "Tom Preston-Werner",
+					"organization": "GitHub",
+					"bio":          "GitHub Cofounder & CEO\nLikes tater tots and beer.",
+					"dob":          tv,
+				}, Position{5, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"server":         "192.168.1.1",
+					"ports":          []interface{}{int64(8001), int64(8001), int64(8002)},
+					"connection_max": int64(5000),
+					"enabled":        true,
+				}, Position{11, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"alpha": map[string]interface{}{
+						"ip": "10.0.0.1",
+						"dc": "eqdc10",
+					},
+					"beta": map[string]interface{}{
+						"ip": "10.0.0.2",
+						"dc": "eqdc10",
+					},
+				}, Position{17, 1},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"ip": "10.0.0.1",
+					"dc": "eqdc10",
+				}, Position{20, 3},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"ip": "10.0.0.2",
+					"dc": "eqdc10",
+				}, Position{24, 3},
+			},
+			queryTestNode{
+				map[string]interface{}{
+					"data": []interface{}{
+						[]interface{}{"gamma", "delta"},
+						[]interface{}{int64(1), int64(2)},
+					},
+				}, Position{28, 1},
+			},
+		})
+
+	// datetime values only
+	assertQueryPositions(t, string(buff),
+		"$..[?(time)]",
+		[]interface{}{
+			queryTestNode{
+				tv, Position{9, 1},
+			},
+		})
+
+	// boolean values only
+	assertQueryPositions(t, string(buff),
+		"$..[?(bool)]",
+		[]interface{}{
+			queryTestNode{
+				true, Position{15, 1},
+			},
+		})
+}
diff --git a/token.go b/token.go
new file mode 100644
index 0000000..22ad37a
--- /dev/null
+++ b/token.go
@@ -0,0 +1,132 @@
+package toml
+
+import (
+	"fmt"
+	"strconv"
+	"unicode"
+)
+
+// Define tokens
+type tokenType int
+
+const (
+	// eof is the out-of-band sentinel rune returned when input is exhausted.
+	eof = -(iota + 1)
+)
+
+// Token types produced by the lexers. The constant order must stay in sync
+// with tokenTypeNames below, which is indexed by these values in String().
+const (
+	tokenError tokenType = iota
+	tokenEOF
+	tokenComment
+	tokenKey
+	tokenString
+	tokenInteger
+	tokenTrue
+	tokenFalse
+	tokenFloat
+	tokenEqual
+	tokenLeftBracket
+	tokenRightBracket
+	tokenLeftParen
+	tokenRightParen
+	tokenDoubleLeftBracket
+	tokenDoubleRightBracket
+	tokenDate
+	tokenKeyGroup
+	tokenKeyGroupArray
+	tokenComma
+	tokenColon
+	tokenDollar
+	tokenStar
+	tokenQuestion
+	tokenDot
+	tokenDotDot
+	tokenEOL
+)
+
+// tokenTypeNames maps each tokenType (indexed by its constant value) to a
+// human-readable name for error messages and debugging. The order must mirror
+// the tokenType constant block exactly. The previous table had no entry for
+// tokenError, which shifted every name by one; it also rendered
+// tokenRightBracket as "[" and swapped the "[["/"]]" entries.
+var tokenTypeNames = []string{
+	"Error",
+	"EOF",
+	"Comment",
+	"Key",
+	"String",
+	"Integer",
+	"True",
+	"False",
+	"Float",
+	"=",
+	"[",
+	"]",
+	"(",
+	")",
+	"[[",
+	"]]",
+	"Date",
+	"KeyGroup",
+	"KeyGroupArray",
+	",",
+	":",
+	"$",
+	"*",
+	"?",
+	".",
+	"..",
+	"EOL",
+}
+
+// token is a single lexeme: its type, raw text, and source position
+// (embedded, so token exposes Position's fields and methods directly).
+type token struct {
+	Position
+	typ tokenType
+	val string
+}
+
+// String returns the human-readable name of the token type, or "Unknown" for
+// values beyond the tokenTypeNames table.
+func (tt tokenType) String() string {
+	idx := int(tt)
+	if idx < len(tokenTypeNames) {
+		return tokenTypeNames[idx]
+	}
+	return "Unknown"
+}
+
+// Int returns the token's value parsed as an integer. A value that fails to
+// parse indicates a lexer bug, so the error is escalated with panic.
+func (t token) Int() int {
+	result, err := strconv.Atoi(t.val)
+	if err != nil {
+		panic(err)
+	}
+	return result
+}
+
+// String renders the token for diagnostics: EOF prints as "EOF", error tokens
+// print their message verbatim, and any other value is quoted, truncated to
+// its first 10 characters with a trailing ellipsis when longer.
+func (t token) String() string {
+	switch t.typ {
+	case tokenEOF:
+		return "EOF"
+	case tokenError:
+		return t.val
+	}
+
+	if len(t.val) > 10 {
+		return fmt.Sprintf("%.10q...", t.val)
+	}
+	return fmt.Sprintf("%q", t.val)
+}
+
+// isSpace reports whether r is a plain space or a tab.
+func isSpace(r rune) bool {
+	return r == ' ' || r == '\t'
+}
+
+// isAlphanumeric reports whether r may appear in a bare identifier. Despite
+// the name, only letters and underscore match — digits do not; presumably the
+// lexer handles digits on a separate path (confirm against the lexer states).
+func isAlphanumeric(r rune) bool {
+	return unicode.IsLetter(r) || r == '_'
+}
+
+// isKeyChar reports whether r may appear inside a TOML key: anything except
+// whitespace, line terminators, end-of-input, and the '=' separator.
+func isKeyChar(r rune) bool {
+	// "Keys start with the first non-whitespace character and end with the last
+	// non-whitespace character before the equals sign."
+	return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '=')
+}
+
+// isDigit reports whether r is a Unicode number character (category N, via
+// unicode.IsNumber — broader than the ASCII digits '0'-'9').
+func isDigit(r rune) bool {
+	return unicode.IsNumber(r)
+}
+
+// isHexDigit reports whether r is a hexadecimal digit. Both upper- and
+// lower-case letters are accepted, matching standard hex notation; the
+// original enumeration recognized only 'A'-'F'.
+func isHexDigit(r rune) bool {
+	return isDigit(r) ||
+		(r >= 'a' && r <= 'f') ||
+		(r >= 'A' && r <= 'F')
+}
diff --git a/toml.go b/toml.go
index 7cd74b0..13ef35e 100644
--- a/toml.go
+++ b/toml.go
@@ -1,7 +1,3 @@
-// Package toml is a TOML markup language parser.
-//
-// This version supports the specification as described in
-// https://github.com/toml-lang/toml/blob/master/versions/toml-v0.2.0.md
 package toml
 
 import (
@@ -28,7 +24,7 @@
 func newTomlTree() *TomlTree {
 	return &TomlTree{
 		values:   make(map[string]interface{}),
-		position: Position{0, 0},
+		position: Position{},
 	}
 }
 
@@ -103,7 +99,7 @@
 // GetPosition returns the position of the given key.
 func (t *TomlTree) GetPosition(key string) Position {
 	if key == "" {
-		return Position{0, 0}
+		return t.position
 	}
 	return t.GetPositionPath(strings.Split(key, "."))
 }
@@ -199,7 +195,7 @@
 // and tree[a][b][c]
 //
 // Returns nil on success, error object on failure
-func (t *TomlTree) createSubTree(keys []string) error {
+func (t *TomlTree) createSubTree(keys []string, pos Position) error {
 	subtree := t
 	for _, intermediateKey := range keys {
 		if intermediateKey == "" {
@@ -207,8 +203,10 @@
 		}
 		nextTree, exists := subtree.values[intermediateKey]
 		if !exists {
-			nextTree = newTomlTree()
-			subtree.values[intermediateKey] = nextTree
+			tree := newTomlTree()
+			tree.position = pos
+			subtree.values[intermediateKey] = tree
+			nextTree = tree
 		}
 
 		switch node := nextTree.(type) {
@@ -317,6 +315,14 @@
 	return result
 }
 
+// Query compiles and executes the given JSON-Path-like query against the
+// tree, returning the matching elements. The query is recompiled on every
+// call; use CompileQuery directly to reuse a compiled query.
+func (t *TomlTree) Query(query string) (*QueryResult, error) {
+	q, err := CompileQuery(query)
+	if err != nil {
+		return nil, err
+	}
+	return q.Execute(t), nil
+}
+
 // ToString generates a human-readable representation of the current tree.
 // Output spans multiple lines, and is suitable for ingest by a TOML parser
 func (t *TomlTree) ToString() string {
@@ -333,8 +339,7 @@
 			err = errors.New(r.(string))
 		}
 	}()
-	_, flow := lex(content)
-	tree = parse(flow)
+	tree = parseToml(lexToml(content))
 	return
 }
 
diff --git a/toml_test.go b/toml_test.go
index 09950b9..c89b973 100644
--- a/toml_test.go
+++ b/toml_test.go
@@ -47,3 +47,28 @@
 		}
 	}
 }
+
+// TestTomlQuery exercises the TomlTree.Query convenience wrapper end-to-end:
+// one matching sub-table with the expected key/value pairs.
+func TestTomlQuery(t *testing.T) {
+	tree, err := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
+	if err != nil {
+		t.Error(err)
+		return
+	}
+	result, err := tree.Query("$.foo.bar")
+	if err != nil {
+		t.Error(err)
+		return
+	}
+	values := result.Values()
+	if len(values) != 1 {
+		t.Errorf("Expected resultset of 1, got %d instead: %v", len(values), values)
+	}
+
+	// the original format string "%T Tv" had a garbled %v verb and failed
+	// go vet's printf check (one directive, two arguments)
+	if tt, ok := values[0].(*TomlTree); !ok {
+		t.Errorf("Expected type of TomlTree: %T %v", values[0], values[0])
+	} else if tt.Get("a") != int64(1) {
+		t.Errorf("Expected 'a' with a value 1: %v", tt.Get("a"))
+	} else if tt.Get("b") != int64(2) {
+		t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b"))
+	}
+}