63 changes: 56 additions & 7 deletions parser.go
@@ -15,6 +15,19 @@ type TokenReader interface {
ReadToken() (Token, error)
}

// ParseFlags is a bitset of boolean flags that can be set for a Parser.
type ParseFlags uint

func (f ParseFlags) isSet(flags ParseFlags) bool {
return f&flags == flags
}

const (
// ParseSentinelEOL treats an end-of-line (newline) as a statement sentinel,
// allowing newlines to terminate statements in place of semicolons.
ParseSentinelEOL ParseFlags = 1 << iota
)

// Parser consumes tokens from a TokenReader and constructs a codf *Document from it.
//
// The Document produced by the Parser is kept for the duration of the parser's lifetime, so it is
@@ -23,6 +36,8 @@ type Parser struct {
doc *Document
next tokenConsumer

flags ParseFlags

lastToken Token
lastErr error

@@ -49,6 +64,16 @@ func NewParser() *Parser {
return p
}

// Flags returns the current ParseFlags for the receiver.
func (p *Parser) Flags() ParseFlags {
return p.flags
}

// SetFlags sets the ParseFlags for the receiver.
func (p *Parser) SetFlags(flags ParseFlags) {
p.flags = flags
}

func (p *Parser) nextToken(tr TokenReader) (tok Token, err error) {
tok, err = tr.ReadToken()
p.lastToken, p.lastErr = tok, err
@@ -107,7 +132,7 @@ func (p *Parser) ParseExpr(tr TokenReader) (ExprNode, error) {
exp := exprParser{}
p.ctx = []parseNode{&exp}
p.parseErr = nil
p.next = skipWhitespace(p.parseStatement)
p.next = p.skipInsignificantWhitespace(p.parseStatement)
if err := p.Parse(tr); err != nil {
return nil, err
}
@@ -220,7 +245,7 @@ func (p *Parser) beginSegment(tok Token) (tokenConsumer, error) {
// Start statement
stmt := &Statement{NameTok: &Literal{tok}}
p.pushContext(stmt)
return skipWhitespace(p.parseStatement), nil
return p.skipInsignificantWhitespace(p.parseStatement), nil
}
return nil, unexpected(tok, "expected statement or section name")
}
@@ -236,11 +261,35 @@ func skipWhitespace(next tokenConsumer) (consumer tokenConsumer) {
return consumer
}

func (p *Parser) skipInsignificantWhitespace(next tokenConsumer) (consumer tokenConsumer) {
if !p.flags.isSet(ParseSentinelEOL) {
return skipWhitespace(next)
}
consumer = func(tok Token) (tokenConsumer, error) {
if tok.Kind == TWhitespace && tok.Start.Line == tok.End.Line {
return consumer, nil
} else if tok.Kind == TComment {
return consumer, nil
}
return next(tok)
}
return consumer
}

func (p *Parser) parseStatementSentinel(tok Token) (tokenConsumer, error) {
switch tok.Kind {
case TEOF:
return nil, p.closeError(tok)

case TWhitespace:
if stmt, ok := p.context().(*Statement); ok {
p.popContext()
stmt.EndTok = tok
p.context().(parentNode).addChild(stmt)
return p.beginSegment, nil
}
return nil, p.closeError(tok)

case TSemicolon:
if stmt, ok := p.context().(*Statement); ok {
p.popContext()
@@ -257,7 +306,7 @@ func (p *Parser) parseStatementSentinel(tok Token) (tokenConsumer, error) {
if err := p.context().(segmentNode).addExpr(ary); err != nil {
return nil, err
}
return skipWhitespace(p.parseStatement), nil
return p.skipInsignificantWhitespace(p.parseStatement), nil
}
return nil, p.closeError(tok)

@@ -272,7 +321,7 @@ func (p *Parser) parseStatementSentinel(tok Token) (tokenConsumer, error) {
if err := p.context().(segmentNode).addExpr(m); err != nil {
return nil, err
}
return skipWhitespace(p.parseStatement), nil
return p.skipInsignificantWhitespace(p.parseStatement), nil
}
return nil, p.closeError(tok)

@@ -294,14 +343,14 @@ func (p *Parser) beginArray(tok Token) (tokenConsumer, error) {
StartTok: tok,
Elems: []ExprNode{},
})
return skipWhitespace(p.parseStatement), nil
return p.skipInsignificantWhitespace(p.parseStatement), nil
}

func (p *Parser) beginMap(tok Token) (tokenConsumer, error) {
m := newMapBuilder()
m.m.StartTok = tok
p.pushContext(m)
return skipWhitespace(p.parseStatement), nil
return p.skipInsignificantWhitespace(p.parseStatement), nil
}

func (p *Parser) parseStatement(tok Token) (tokenConsumer, error) {
@@ -327,7 +376,7 @@ func (p *Parser) parseStatement(tok Token) (tokenConsumer, error) {
if err := p.context().(segmentNode).addExpr(&Literal{tok}); err != nil {
return nil, err
}
return skipWhitespace(p.parseStatement), nil
return p.skipInsignificantWhitespace(p.parseStatement), nil
}

return p.parseStatementSentinel(tok)
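For context, a minimal sketch of how the new flag would be enabled, mirroring the parse helper added in parser_test.go below. parseWithEOL is a hypothetical name and not part of this diff; it assumes the NewLexer/NewParser/ParseSentinelEOL API shown here and requires the standard "strings" import.

// parseWithEOL parses in with end-of-line sentinels enabled, so newlines
// terminate statements the same way semicolons do.
func parseWithEOL(in string) (*Document, error) {
	p := NewParser()
	p.SetFlags(ParseSentinelEOL)
	if err := p.Parse(NewLexer(strings.NewReader(in))); err != nil {
		return nil, err
	}
	return p.Document(), nil
}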
69 changes: 59 additions & 10 deletions parser_test.go
@@ -7,32 +7,44 @@ import (
"time"
)

func parse(in string) (*Document, error) {
type configFunc func(*Lexer, *Parser)

func parserSetSentinelEOL(_ *Lexer, p *Parser) {
p.SetFlags(ParseSentinelEOL)
if p.Flags()&ParseSentinelEOL != ParseSentinelEOL {
panic("unable to set ParseSentinelEOL")
}
}

func parse(in string, config ...configFunc) (*Document, error) {
r := strings.NewReader(in)
l := NewLexer(r)
p := NewParser()
for _, fn := range config {
fn(l, p)
}
if err := p.Parse(l); err != nil {
return nil, err
}
return p.Document(), nil
}

func mustParse(t *testing.T, in string) *Document {
doc, err := parse(in)
func mustParse(t *testing.T, in string, config ...configFunc) *Document {
doc, err := parse(in, config...)
if err != nil {
t.Fatalf("Parse(..) error = %v; want nil", err)
}
t.Logf("-------- DOCUMENT --------\n%s\n------ END DOCUMENT ------", doc)
return doc
}

func mustParseNamed(t *testing.T, name string, in string) *Document {
func mustParseNamed(t *testing.T, name string, in string, config ...configFunc) *Document {
doc := mustParse(t, in, config...)
doc.Name = name
return doc
}

func mustNotParse(t *testing.T, in string) *Document {
func mustNotParse(t *testing.T, in string, config ...configFunc) *Document {
doc, err := parse(in, config...)
if err == nil {
t.Fatalf("Parse(..) error = %v; want error", err)
@@ -43,10 +55,11 @@ func mustNotParse(t *testing.T, in string) *Document {

// parseTestCase is used to describe and run a parser test, optionally as a subtest.
type parseTestCase struct {
Name string
Src string
Doc *Document
Fun func(*testing.T, string) *Document
Name string
Src string
Doc *Document
Fun func(*testing.T, string, ...configFunc) *Document
Config []configFunc
}

func (p parseTestCase) RunSubtest(t *testing.T) {
@@ -59,7 +72,7 @@ func (p parseTestCase) Run(t *testing.T) {
if fn == nil {
fn = mustParse
}
doc := fn(t, p.Src)
doc := fn(t, p.Src, p.Config...)
objectsEqual(t, "", doc, p.Doc)
}

@@ -203,6 +216,42 @@ func TestParseExample(t *testing.T) {
}.Run(t)
}

func TestParseExampleSentinelEOL(t *testing.T) {
const exampleSource = `server go.spiff.io {
// Retain some semicolons to see they're still the same
listen 0.0.0.0:80;
control unix:///var/run/httpd.sock
proxy unix:///var/run/go-redirect.sock {
strip-x-headers yes
log-access no;
}
// keep caches in 64mb of memory
cache memory 64mb {
expire 10m 404
expire 1h 301 302;
expire 5m 200
}
}`

parseTestCase{
Src: exampleSource,
Config: []configFunc{parserSetSentinelEOL},
Doc: doc().
section("server", "go.spiff.io").
/* server */ statement("listen", "0.0.0.0:80").
/* server */ statement("control", "unix:///var/run/httpd.sock").
/* server */ section("proxy", "unix:///var/run/go-redirect.sock").
/* server */ /* proxy */ statement("strip-x-headers", true).
/* server */ /* proxy */ statement("log-access", false).
/* server */ up().
/* server */ section("cache", "memory", "64mb").
/* server */ /* cache */ statement("expire", time.Minute*10, 404).
/* server */ /* cache */ statement("expire", time.Hour, 301, 302).
/* server */ /* cache */ statement("expire", time.Minute*5, 200).
Doc(),
}.Run(t)
}

func TestParseEmpty(t *testing.T) {
t.Run("Empty", func(t *testing.T) {
objectsEqual(t, "",