go-toml/parse/document.go

package parse

import (
	"git.makaay.nl/mauricem/go-parsekit/parse"
	"git.makaay.nl/mauricem/go-parsekit/tokenize"
)

// Some globally useful tokenizer definitions.
var (
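	// Handy shorthands for parsekit's tokenizer building blocks.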
	c, a, m, tok = tokenize.C, tokenize.A, tokenize.M, tokenize.T

	// Whitespace means tab (0x09) or space (0x20).
	// This matches the blanks as defined by parsekit.
	whitespace = a.Blanks.Optional()

	// Newline means LF (0x0A) or CRLF (0x0D0A).
	// This matches the newline as defined by parsekit.
	newline = a.Newline

	// Whitespace + newlines.
	// This matches the whitespace as defined by parsekit.
	whitespaceInclNewlines = a.Whitespace

	// A '#' hash symbol marks the rest of the line as a comment.
	// All characters up to the end of the line are included in the comment.
	comment         = c.Seq(a.Hash, a.UntilEndOfLine)
	optionalComment = comment.Optional()

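	// The allowed remainder of a line: optional whitespace, followed by an
	// optional comment, followed by the end of the line.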
	endOfLineOrComment = c.Seq(whitespace, optionalComment, a.EndOfLine)

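	// Whitespace (including newlines) or a comment, as found between
	// top-level document items.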
	whitespaceNewlinesOrComments = whitespaceInclNewlines.Or(comment)

	// Keys may be either bare or quoted.
	detectKey = c.Any(bareKeyRune, a.SingleQuote, a.DoubleQuote)

	// Both [tables] and [[arrays of tables]] start with a square open bracket.
	detectTable = a.SquareOpen
)

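// startDocument is the top-level parse handler for a TOML document. It skips
// whitespace, newlines and comments, and dispatches to the key/value pair or
// table handlers based on the upcoming input, until the end of the file is
// reached or the parse is stopped by an error.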
func (t *parser) startDocument(p *parse.API) {
	for {
		switch {
		case p.Skip(whitespaceNewlinesOrComments):
			// NOOP
		case p.Peek(detectKey):
			p.Handle(t.startKeyValuePair)
		case p.Peek(detectTable):
			p.Handle(t.startTable)
		case p.Skip(a.EndOfFile):
			p.Stop()
		default:
			p.Expected("key/value pair, table or array of tables")
			return
		}
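		// Stop looping when the parser was stopped or ran into an error.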
		if p.IsStoppedOrInError() {
			return
		}
	}
}