package parse

import (
	"git.makaay.nl/mauricem/go-parsekit/parse"
	"git.makaay.nl/mauricem/go-parsekit/tokenize"
	"git.makaay.nl/mauricem/go-toml/ast"
)

// Some globally useful tokenizer definitions.
var (
	c, a, m, tok = tokenize.C, tokenize.A, tokenize.M, tokenize.T

	// Whitespace means tab (0x09) or space (0x20).
	// This matches the blanks as defined by parsekit.
	whitespace = a.Blanks.Optional()

	// Newline means LF (0x0A) or CRLF (0x0D0A).
	// This matches the newline as defined by parsekit.
	newline = a.Newline

	// Whitespace + newlines.
	// This matches the whitespace as defined by parsekit.
	whitespaceInclNewlines = a.Whitespace

	// A '#' hash symbol marks the rest of the line as a comment.
	// All characters up to the end of the line are included in the comment.
	comment             = c.Seq(a.Hash, a.UntilEndOfLine)
	optionalComment     = comment.Optional()
	endOfLineOrComment  = c.Seq(whitespace, optionalComment, a.EndOfLine)
	whitespaceOrComment = c.ZeroOrMore(whitespaceInclNewlines.Or(comment))
)

type parser struct {
	doc *ast.Document
}

func newParser() *parser {
	doc := ast.NewDocument()
	return &parser{doc}
}

// Run runs the TOML parser against the provided input data.
//
// For an overview of allowed inputs, take a look at the documentation for
// parsekit.read.New(). At the time of writing, you can make use of
// strings, types implementing io.Reader, and bufio.Readers.
//
// This function returns a TOML abstract syntax tree structure and an
// error (or nil when no error occurred). When an error occurred, the
// TOML ast struct will contain the data that could be parsed up to the error.
func Run(input interface{}) (ast.Table, error) {
	p := newParser()
	err := parse.New(p.startDocument)(input)
	return p.doc.Root, err
}

// RunWithoutSanityChecks runs the TOML parser against the provided input data.
// The parsekit sanity checks are disabled during the parse run.
func RunWithoutSanityChecks(input interface{}) (ast.Table, error) {
	p := newParser()
	err := parse.NewWithoutSanityChecks(p.startDocument)(input)
	return p.doc.Root, err
}
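
// exampleRun is a minimal usage sketch, not part of the parser itself: it
// shows how the Run entry point can be called with a plain TOML string and
// how the root table and error are returned together. The TOML snippet used
// here is an arbitrary assumption for illustration.
func exampleRun() (ast.Table, error) {
	// On a parse error, the returned table still contains everything that
	// could be parsed up to the point of failure.
	return Run(`title = "TOML example"`)
}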