Straightening out a few small wrinkles that came up from testing against the BurntSushi test set.

Maurice Makaay 2019-06-27 22:01:56 +00:00
parent 13d0011d9d
commit 0d4cb356e9
10 changed files with 109 additions and 92 deletions

.gitignore
View File

@ -1,3 +1,6 @@
# Build output
cmd/toml-test-decoder/toml-test-decoder
# ---> Vim
# Swap
[._]*.s[a-v][a-z]

View File

@ -81,10 +81,10 @@ const (
TypeLocalTime ValueType = "time"
// TypeArrayOfTables identifies an [[array.of.tables]].
TypeArrayOfTables ValueType = "array"
TypeArrayOfTables ValueType = "arrayOfTables"
// TypeArray identifies ["an", "inline", "static", "array"].
TypeArray ValueType = "static array"
TypeArray ValueType = "array"
// TypeTable identifies an { "inline" = "table" } or [standard.table].
TypeTable ValueType = "table"
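
To illustrate the effect of these renames, here is a hypothetical helper (not part of this commit, and assuming the ast package of this repository is imported) that consumes the type tags; with the new names, a static array and an [[array.of.tables]] no longer share an ambiguous "array" label:

func describe(v *ast.Value) string {
    switch v.Type {
    case ast.TypeArray:
        return "static array, e.g. [1, 2, 3]"
    case ast.TypeArrayOfTables:
        return "array of tables, e.g. [[fruit]]"
    default:
        return string(v.Type)
    }
}
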
@ -115,9 +115,9 @@ func (doc *Document) OpenTable(key Key) error {
doc.CurrentKey = nil
doc.Current = doc.Root
// Go over all requested levels of the key. For all levels, except the last
// one, it is okay if a Table already exists. For at least the last level,
// no table or value must exist, because that would mean we are overwriting
// an existing key/value pair, which is not allowed.
// one, it is okay if a Table or TableArray already exists. For at least the
// last level, no table or value must exist, because that would mean we are
// overwriting an existing key/value pair, which is not allowed.
node, lastKeyPart, err := doc.makeTablePath(key)
if err != nil {
return fmt.Errorf("invalid table: %s", err)
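
A minimal usage sketch of this rule, written in the style of the tests further down (hypothetical snippet; imports and test scaffolding omitted): intermediate key parts may already exist as a Table or TableArray, but a key part that already holds a plain value can never be reused as a table level.

func ExampleOpenTableOverValue() {
    doc := ast.NewDocument()
    doc.OpenTable(ast.NewKey("server"))
    doc.SetKeyValuePair(ast.NewKey("port"), ast.NewValue(ast.TypeString, "80"))

    // "port" already holds a value, so it cannot be reused as a table level;
    // makeTablePath (below) reports the conflict.
    err := doc.OpenTable(ast.NewKey("server", "port", "limits"))
    fmt.Println(err != nil) // true
}
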
@ -146,9 +146,9 @@ func (doc *Document) OpenArrayOfTables(key Key) error {
doc.CurrentKey = nil
doc.Current = doc.Root
// Go over all requested levels of the key. For all levels, except the last
// one, it is okay if a Table already exists. For the last level, either
// no value must exist (in which case a table array will be created), or a
// table array must exist.
// one, it is okay if a Table or ArrayOfTables already exists. For the last
// level, either no value must exist (in which case a table array will be
// created), or a table array must exist.
// Other cases would mean we are overwriting an existing key/value pair,
// which is not allowed.
node, lastKeyPart, err := doc.makeTablePath(key)
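
The corresponding sketch for table arrays (same assumptions as the snippet above): opening the same [[array.of.tables]] key a second time is not an overwrite, it appends a fresh table, and subsequent key/value pairs land in that most recently appended table.

func ExampleOpenArrayOfTablesTwice() {
    doc := ast.NewDocument()
    doc.OpenArrayOfTables(ast.NewKey("fruit"))
    doc.SetKeyValuePair(ast.NewKey("name"), ast.NewValue(ast.TypeString, "apple"))

    doc.OpenArrayOfTables(ast.NewKey("fruit")) // appends a second table to the same array
    doc.SetKeyValuePair(ast.NewKey("name"), ast.NewValue(ast.TypeString, "banana"))
}
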
@ -193,13 +193,21 @@ func (doc *Document) makeTablePath(key Key) (Table, string, error) {
}
if subValue, ok := node[keyPart]; ok {
// You cannot overwrite an already defined key, regardless of its value.
// When a value already exists at the current key, this can only be a table.
if subValue.Type != TypeTable {
// When a value already exists at the current key, this can only be a table
// or an array of tables. In case of an array of tables, the last created
// table will be used.
if subValue.Type == TypeTable {
// A table was found, traverse to that table.
node = subValue.Data[0].(Table)
} else if subValue.Type == TypeArrayOfTables {
// An array of tables was found, traverse to the last table in the array.
lastValue := subValue.Data[len(subValue.Data)-1].(*Value)
lastTable := lastValue.Data[0].(Table)
node = lastTable
} else {
path := doc.formatKeyPath(key, i)
return nil, "", fmt.Errorf("%s value already exists at key %s", subValue.Type, path)
}
// All is okay, traverse to the subtable.
node = subValue.Data[0].(Table)
} else {
// The subtable does not exist yet. Create the subtable.
subTable := make(Table)
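
For reference, the kind of TOML input that exercises this new array-of-tables branch, in the same notation as the test comments further down; when [fruit.physical] is opened, the "fruit" level resolves to the last table that was appended to the array:

// [[fruit]]
// name = "apple"
//
// [fruit.physical]
// color = "red"
// shape = "round"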

View File

@ -32,6 +32,55 @@ func Test_ConstructSlightlyComplexStructure(t *testing.T) {
`"key1": {"key2 a": {"dah": false, "dooh": true}, "key2 b": {"dieh": 1.111, "duh": 1.18e-12, "foo": {"bar": [1, 2]}}}}`)
}
// This document structure mirrors the nested arrays of tables example from the
// TOML 0.5.0 specification.
func Test_ConstructNestedArraysOfTables(t *testing.T) {
testAST(t, func() (error, *ast.Document) {
p := ast.NewDocument()
p.OpenArrayOfTables(ast.NewKey("fruit"))
p.SetKeyValuePair(ast.NewKey("name"), ast.NewValue(ast.TypeString, "apple"))
p.OpenTable(ast.NewKey("fruit", "physical"))
p.SetKeyValuePair(ast.NewKey("color"), ast.NewValue(ast.TypeString, "red"))
p.SetKeyValuePair(ast.NewKey("shape"), ast.NewValue(ast.TypeString, "round"))
p.OpenArrayOfTables(ast.NewKey("fruit", "variety"))
p.SetKeyValuePair(ast.NewKey("name"), ast.NewValue(ast.TypeString, "red delicious"))
p.OpenArrayOfTables(ast.NewKey("fruit", "variety"))
p.SetKeyValuePair(ast.NewKey("name"), ast.NewValue(ast.TypeString, "granny smith"))
p.OpenArrayOfTables(ast.NewKey("fruit"))
p.SetKeyValuePair(ast.NewKey("name"), ast.NewValue(ast.TypeString, "banana"))
p.OpenArrayOfTables(ast.NewKey("fruit", "variety"))
p.SetKeyValuePair(ast.NewKey("name"), ast.NewValue(ast.TypeString, "plantain"))
return nil, p
},
"",
`{"fruit": [`+
`{"name": "apple", "physical": {"color": "red", "shape": "round"}, "variety": [{"name": "red delicious"}, {"name": "granny smith"}]}, `+
`{"name": "banana", "variety": [{"name": "plantain"}]}`+
`]}`)
}
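
For reference, the TOML document that this test models (the nested arrays-of-tables example from the TOML 0.5.0 specification) looks roughly like this:

// [[fruit]]
//   name = "apple"
//
//   [fruit.physical]
//     color = "red"
//     shape = "round"
//
//   [[fruit.variety]]
//     name = "red delicious"
//
//   [[fruit.variety]]
//     name = "granny smith"
//
// [[fruit]]
//   name = "banana"
//
//   [[fruit.variety]]
//     name = "plantain"
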
// This is a case from the BurntSushi test set which my parser did not correctly
// handle. From the specs, it was unclear to me that it was okay to handle things
// in this way. The actual TOML document that would lead to this looks like:
//
// [a.b.c]
// answer = 42
//
// [a]
// better = 43
func Test_ConstructExplicitTableAfterImplicitSubtable(t *testing.T) {
testAST(t, func() (error, *ast.Document) {
p := ast.NewDocument()
p.OpenTable(ast.NewKey("a", "b", "c"))
p.SetKeyValuePair(ast.NewKey("answer"), ast.NewValue(ast.TypeString, "42"))
p.OpenTable(ast.NewKey("a"))
p.SetKeyValuePair(ast.NewKey("better"), ast.NewValue(ast.TypeString, "43"))
return nil, p
},
"",
`{"a": {"b": {"c": {"answer": "42"}}, "better": "43"}}`)
}
func Test_EmptyKeyForCreatingTablePath_Panics(t *testing.T) {
defer func() {
r := recover()

View File

@ -4,20 +4,14 @@
package main
import (
//"encoding/json"
"encoding/json"
"flag"
"fmt"
"sort"
"strings"
"time"
//"fmt"
"log"
"os"
"path"
//"time"
"sort"
"strings"
"time"
"git.makaay.nl/mauricem/go-toml/ast"
"git.makaay.nl/mauricem/go-toml/parse"
@ -45,66 +39,12 @@ func main() {
toml, err := parse.Run(os.Stdin)
if err != nil {
log.Fatalf("Error decoding TOML: %s", err)
}
} else {
sushi := makeSushi(ast.NewValue(ast.TypeTable, toml))
var v = new(interface{})
if err := json.NewDecoder(strings.NewReader(sushi)).Decode(v); err != nil {
log.Fatalf("Error decoding JSON: %s\n%s\n", err, sushi)
}
encoder := json.NewEncoder(os.Stdout)
encoder.SetIndent("", " ")
if err := encoder.Encode(v); err != nil {
log.Fatalf("Error encoding JSON: %s", err)
fmt.Println(sushi)
}
}
// func translate(node *ast.Value) interface{} {
// switch node.Type {
// case ast.TypeTable:
// typed := make(map[string]interface{}, len(node.Data))
// for k, v := range node.Data[0].(ast.Table) {
// typed[k] = translate(v)
// }
// return typed
// case ast.TypeArrayOfTables:
// typed := make([]map[string]interface{}, len(node.Data))
// for i, v := range node.Data {
// value := v.(*ast.Value)
// typed[i] = translate(value).(map[string]interface{})
// }
// return typed
// case []interface{}:
// typed := make([]interface{}, len(orig))
// for i, v := range orig {
// typed[i] = translate(v)
// }
// // We don't really need to tag arrays, but let's be future proof.
// // (If TOML ever supports tuples, we'll need this.)
// return tag("array", typed)
// case time.Time:
// return tag("datetime", orig.Format("2006-01-02T15:04:05Z"))
// case bool:
// return tag("bool", fmt.Sprintf("%v", orig))
// case int64:
// return tag("integer", fmt.Sprintf("%d", orig))
// case float64:
// return tag("float", fmt.Sprintf("%v", orig))
// case string:
// return tag("string", orig)
// }
// panic(fmt.Sprintf("Unknown type: %T", tomlData))
// }
// func tag(typeName string, data interface{}) map[string]interface{} {
// return map[string]interface{}{
// "type": typeName,
// "value": data,
// }
// }
func makeSushi(value *ast.Value) string {
switch value.Type {
case ast.TypeString:
@ -152,7 +92,7 @@ func makeSushi(value *ast.Value) string {
}
return fmt.Sprintf("{%s}", strings.Join(values, ", "))
default:
return renderValue(string(value.Type), fmt.Sprintf("%q", value.Data[0]))
panic(fmt.Sprintf("Unhandled data type: %s", value.Type))
}
}
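
For context, the BurntSushi toml-test harness compares decoder output against "tagged" JSON, so every leaf value is wrapped in a type/value pair. An illustrative example (not taken from the test set):

// TOML input:            answer = 42
// expected JSON output:  {"answer": {"type": "integer", "value": "42"}}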

View File

@ -13,6 +13,7 @@ func TestComment(t *testing.T) {
{"# ending in EOL & EOF\r\n", `{}`, ``},
{`# \xxx/ \u can't escape/`, `{}`, ``},
{"# \tlexe\r accepts embedded ca\r\riage \returns\r\n", `{}`, ``},
{" # multiple\n#lines\n \t\n\n\t#with\n ### comments!", `{}`, ``},
{"# with data and newline\ncode continues here", `{}`, `unexpected input (expected a value assignment) at line 2, column 5`},
} {
p := newParser()

View File

@ -14,9 +14,9 @@ var (
func (t *parser) startDocument(p *parse.API) {
for {
p.Accept(dropWhitespace)
p.Accept(dropComment)
switch {
case p.Accept(whitespaceOrComment):
// NOOP, skip these
case p.Peek(detectTableOrArrayOfTables):
p.Handle(t.startTable)
case p.Peek(detectKey):

View File

@ -34,7 +34,7 @@ var (
keySeparatorDot = c.Seq(dropBlanks, a.Dot, dropBlanks)
// After a value, the line must end. There can be an optional comment.
endOfLineAfterValue = c.Seq(dropBlanks, a.EndOfLine.Or(dropComment))
endOfLineAfterValue = c.Seq(dropBlanks, a.EndOfLine.Or(comment))
)
func (t *parser) startKeyValuePair(p *parse.API) {

View File

@ -14,17 +14,26 @@ var (
// In this package, we name this a blank, to be in line with the
// terminology as used in parsekit.
blank = a.Runes('\t', ' ')
blanks = c.OneOrMore(blank)
optionalBlanks = c.ZeroOrMore(blank)
dropBlanks = m.Drop(optionalBlanks)
// Newline means LF (0x0A) or CRLF (0x0D0A).
// This matches the default newline as defined by parsekit.
newline = a.Newline
dropBlanks = m.Drop(c.ZeroOrMore(blank))
dropWhitespace = m.Drop(c.ZeroOrMore(blank.Or(newline)))
// Whitespace is defined as blanks + newlines.
whitespace = c.OneOrMore(blank.Or(newline))
optionalWhitespace = c.ZeroOrMore(blank.Or(newline))
dropWhitespace = m.Drop(optionalWhitespace)
// A '#' hash symbol marks the rest of the line as a comment.
// All characters up to the end of the line are included in the comment.
dropComment = m.Drop(c.Seq(a.Hash, c.ZeroOrMore(c.Not(a.EndOfLine)), m.Drop(a.EndOfLine)))
comment = c.Seq(a.Hash, c.ZeroOrMore(c.Not(a.EndOfLine)), m.Drop(a.EndOfLine))
endOfLineOrComment = optionalBlanks.Then(a.EndOfLine.Or(comment))
whitespaceOrComment = c.OneOrMore(c.Any(blank, newline, comment))
optionalWhitespaceOrComment = c.Optional(whitespaceOrComment)
)
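
A few illustrative inputs for these combinators (assumptions about intent, mirroring the new comment-lexer test case earlier in this commit): endOfLineOrComment accepts a bare line ending or a trailing comment, while whitespaceOrComment also swallows whole blank and comment-only lines.

// endOfLineOrComment accepts:  "\n", "   \n", "  # a trailing comment\n"
// whitespaceOrComment accepts: " # multiple\n#lines\n \t\n\n\t#with\n ### comments!"
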
// parser embeds the TOML ast.Document, so it can be extended with methods

View File

@ -31,10 +31,9 @@ import (
// 2, # this is ok
// ]
var (
arraySpace = c.ZeroOrMore(c.Any(blank, newline, dropComment))
arrayOpen = a.SquareOpen.Then(arraySpace)
arraySeparator = c.Seq(arraySpace, a.Comma, arraySpace)
arrayClose = c.Seq(c.Optional(arraySpace.Then(a.Comma)), arraySpace, a.SquareClose)
arrayOpen = a.SquareOpen.Then(optionalWhitespaceOrComment)
arraySeparator = c.Seq(optionalWhitespaceOrComment, a.Comma, optionalWhitespaceOrComment)
arrayClose = c.Seq(c.Optional(optionalWhitespaceOrComment.Then(a.Comma)), optionalWhitespaceOrComment, a.SquareClose)
)
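
With these definitions, the array delimiters tolerate blanks, newlines, and comments everywhere the comment block above describes. An illustrative array (TOML, in the same comment notation as above):

// fruits = [        # after the opening bracket
//     "apple",      # after a separator
//
//     "banana",     # blank lines are fine too
// ]
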
func (t *parser) parseArray(p *parse.API) (*ast.Value, bool) {

View File

@ -8,11 +8,11 @@ import (
var (
// Opener and closer for [table].
tableOpen = c.Seq(dropBlanks, a.SquareOpen, dropBlanks)
tableClose = c.Seq(dropBlanks, a.SquareClose, dropBlanks, a.EndOfLine.Or(dropComment))
tableClose = c.Seq(dropBlanks, a.SquareClose, dropBlanks)
// Opener and closer for [[array.of.tables]].
tableArrayOpen = c.Seq(dropBlanks, a.SquareOpen, a.SquareOpen, dropBlanks)
tableArrayClose = c.Seq(dropBlanks, a.SquareClose, a.SquareClose, dropBlanks, a.EndOfLine.Or(dropComment))
tableArrayClose = c.Seq(dropBlanks, a.SquareClose, a.SquareClose, dropBlanks)
// Opener, separator and closer for { inline: "tables" }.
inlineTableOpen = c.Seq(dropBlanks, a.CurlyOpen, dropBlanks)
@ -75,6 +75,10 @@ func (t *parser) startArrayOfTables(p *parse.API) {
p.Expected("closing ']]' for array of tables name")
return
}
if !p.Accept(endOfLineOrComment) {
p.Expected("end of line or comment")
return
}
if err := t.OpenArrayOfTables(key); err != nil {
p.Error("%s", err)
return
@ -127,6 +131,10 @@ func (t *parser) startPlainTable(p *parse.API) {
p.Expected("closing ']' for table name")
return
}
if !p.Accept(endOfLineOrComment) {
p.Expected("end of line or comment")
return
}
if err := t.OpenTable(key); err != nil {
p.Error("%s", err)
return
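
The net effect of these two added checks: after the closing bracket(s) of a [table] or [[array.of.tables]] header, only blanks followed by a line ending or a comment are accepted. Illustrative examples (same comment notation as above):

// [fruit]                  # accepted: header followed by a trailing comment
// [[fruit.variety]]        # accepted
// [fruit] name = "apple"   # rejected: expected end of line or comment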