Skip to content

Commit

Permalink
Conformance tests for the runtime and generated lexers.
Browse files Browse the repository at this point in the history
The goal is to have a single lexer definition that exercises all the
functionality of the stateful lexer and generated equivalent.

See #264
  • Loading branch information
alecthomas committed Sep 28, 2022
1 parent 0d264e9 commit fa71ac8
Show file tree
Hide file tree
Showing 8 changed files with 254 additions and 42 deletions.
@@ -1,9 +1,12 @@
// Code generated by Participle. DO NOT EDIT.
{{if .Tags}}//go:build {{.Tags}}
{{end -}}
package {{.Package}}

import (
"io"
"strings"
"sync"
"unicode/utf8"
"regexp/syntax"

Expand All @@ -12,7 +15,9 @@ import (
)

var _ syntax.Op
const _ = utf8.RuneError

var {{.Name}}BackRefCache sync.Map
var {{.Name}}Lexer lexer.Definition = lexer{{.Name}}DefinitionImpl{}

type lexer{{.Name}}DefinitionImpl struct {}
Expand All @@ -33,7 +38,7 @@ func (lexer{{.Name}}DefinitionImpl) LexString(filename string, s string) (lexer.
Line: 1,
Column: 1,
},
states: []lexer{{.Name}}State{ {name: "Root"} },
states: []lexer.State{ {Name: "Root"} },
}, nil
}

Expand All @@ -50,16 +55,11 @@ func (d lexer{{.Name}}DefinitionImpl) Lex(filename string, r io.Reader) (lexer.L
return d.LexString(filename, s.String())
}

type lexer{{.Name}}State struct {
name string
groups []string
}

type lexer{{.Name}}Impl struct {
s string
p int
pos lexer.Position
states []lexer{{.Name}}State
states []lexer.State
}

func (l *lexer{{.Name}}Impl) Next() (lexer.Token, error) {
Expand All @@ -71,7 +71,7 @@ func (l *lexer{{.Name}}Impl) Next() (lexer.Token, error) {
groups []int
sym lexer.TokenType
)
switch state.name {
switch state.Name {
{{- range $state := .Def.Rules|OrderRules}}
case "{{$state.Name}}":
{{- range $i, $rule := $state.Rules}}
Expand All @@ -84,7 +84,7 @@ func (l *lexer{{.Name}}Impl) Next() (lexer.Token, error) {
if true {
{{- end}}
{{- if .|IsPush}}
l.states = append(l.states, lexer{{$.Name}}State{name: "{{.|IsPush}}"{{if HaveBackrefs $.Def $state.Name}}, groups: l.sgroups(groups){{end}}})
l.states = append(l.states, lexer.State{Name: "{{.|IsPush}}"{{if HaveBackrefs $.Def $state.Name}}, Groups: l.sgroups(groups){{end}}})
{{- else if (or (.|IsPop) (.|IsReturn))}}
l.states = l.states[:len(l.states)-1]
{{- if .|IsReturn}}
Expand Down
24 changes: 17 additions & 7 deletions cmd/participle/gen_lexer_cmd.go
Expand Up @@ -18,8 +18,9 @@ import (
type genLexerCmd struct {
Name string `help:"Name of the lexer."`
Output string `short:"o" help:"Output file."`
Tags string `help:"Build tags to include in the generated file."`
Package string `arg:"" required:"" help:"Go package for generated code."`
Lexer string `arg:"" required:"" default:"-" type:"existingfile" help:"JSON representation of a Participle lexer."`
Lexer string `arg:"" default:"-" type:"existingfile" help:"JSON representation of a Participle lexer (read from stdin if omitted)."`
}

func (c *genLexerCmd) Help() string {
Expand Down Expand Up @@ -52,18 +53,26 @@ func (c *genLexerCmd) Run() error {
if err != nil {
return err
}
err = generateLexer(os.Stdout, c.Package, def, c.Name)
out := os.Stdout
if c.Output != "" {
out, err = os.Create(c.Output)
if err != nil {
return err
}
defer out.Close()
}
err = generateLexer(out, c.Package, def, c.Name, c.Tags)
if err != nil {
return err
}
return nil
}

var (
//go:embed files/codegen.go.tmpl
//go:embed codegen.go.tmpl
codegenTemplateSource string
codegenBackrefRe = regexp.MustCompile(`(\\+)(\d)`)
codegenTemplate *template.Template = template.Must(template.New("lexgen").Funcs(template.FuncMap{
codegenBackrefRe = regexp.MustCompile(`(\\+)(\d)`)
codegenTemplate = template.Must(template.New("lexgen").Funcs(template.FuncMap{
"IsPush": func(r lexer.Rule) string {
if p, ok := r.Action.(lexer.ActionPush); ok {
return p.State
Expand All @@ -89,14 +98,15 @@ var (
}).Parse(codegenTemplateSource))
)

func generateLexer(w io.Writer, pkg string, def *lexer.StatefulDefinition, name string) error {
func generateLexer(w io.Writer, pkg string, def *lexer.StatefulDefinition, name, tags string) error {
type ctx struct {
Package string
Name string
Tags string
Def *lexer.StatefulDefinition
}
rules := def.Rules()
err := codegenTemplate.Execute(w, ctx{pkg, name, def})
err := codegenTemplate.Execute(w, ctx{pkg, name, tags, def})
if err != nil {
return err
}
Expand Down
8 changes: 5 additions & 3 deletions cmd/participle/main.go
Expand Up @@ -4,10 +4,12 @@ import "github.com/alecthomas/kong"

var (
version string = "dev"
cli struct {

cli struct {
Version kong.VersionFlag
Gen struct {
Lexer genLexerCmd `cmd:""`

Gen struct {
Lexer genLexerCmd `cmd:"" help:"Generate a lexer."`
} `cmd:"" help:"Generate code to accelerate Participle."`
}
)
Expand Down
14 changes: 14 additions & 0 deletions lexer/internal/conformance/conformance_codegen_test.go
@@ -0,0 +1,14 @@
//go:build generated

package conformance_test

import (
"testing"

"github.com/alecthomas/participle/v2/lexer/internal/conformance"
)

// This should only be run by TestLexerConformanceGenerated.
func TestLexerConformanceGeneratedInternal(t *testing.T) {
testLexer(t, conformance.GeneratedConformanceLexer)
}
154 changes: 154 additions & 0 deletions lexer/internal/conformance/conformance_test.go
@@ -0,0 +1,154 @@
package conformance_test

import (
"encoding/json"
"flag"
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"testing"

"github.com/alecthomas/assert/v2"
"github.com/alecthomas/participle/v2/lexer"
)

// conformanceLexer is the single stateful lexer definition exercised by the
// conformance suite. The same definition is run through both the runtime
// (interpreted) lexer and the generated lexer so the two can be verified to
// behave identically. NOTE: rule order within each state is significant —
// do not reorder.
var conformanceLexer = lexer.MustStateful(lexer.Rules{
	"Root": {
		{"String", `"`, lexer.Push("String")},
		// {"Heredoc", `<<(\w+\b)`, lexer.Push("Heredoc")},
	},
	// Inside a double-quoted string: escapes, ${...} interpolation, and
	// runs of plain characters.
	"String": {
		{"Escaped", `\\.`, nil},
		{"StringEnd", `"`, lexer.Pop()},
		{"Expr", `\${`, lexer.Push("Expr")},
		{"Char", `[^$"\\]+`, nil},
	},
	// Inside ${...}: nested strings (via the included Root rules),
	// whitespace, arithmetic operators, and identifiers that start a
	// dotted reference.
	"Expr": {
		lexer.Include("Root"),
		{`Whitespace`, `\s+`, nil},
		{`Oper`, `[-+/*%]`, nil},
		{"Ident", `\w+`, lexer.Push("Reference")},
		{"ExprEnd", `}`, lexer.Pop()},
	},
	// Dotted references such as user.name; lexer.Return() pops back to
	// Expr once neither Dot nor Ident matches.
	"Reference": {
		{"Dot", `\.`, nil},
		{"Ident", `\w+`, nil},
		lexer.Return(),
	},
	// Backreference-based heredocs are not yet exercised — see the
	// commented-out Heredoc rule in Root above.
	// "Heredoc": {
	// 	{"End", `\b\1\b`, lexer.Pop()},
	// 	lexer.Include("Expr"),
	// },
})

// token is a simplified (type name, matched text) pair used to express
// expected lexer output in the conformance test tables, with the numeric
// lexer.TokenType resolved to its symbolic name.
type token struct {
	Type  string
	Value string
}

// testLexer runs the shared conformance table against an arbitrary lexer
// definition so that the interpreted and generated lexers can be checked
// for identical behaviour.
func testLexer(t *testing.T, lex lexer.Definition) {
	t.Helper()
	tests := []struct {
		name     string
		input    string
		expected []token
	}{
		{"Push", `"${"Hello ${name + "!"}"}"`, []token{
			{"String", "\""},
			{"Expr", "${"},
			{"String", "\""},
			{"Char", "Hello "},
			{"Expr", "${"},
			{"Ident", "name"},
			{"Whitespace", " "},
			{"Oper", "+"},
			{"Whitespace", " "},
			{"String", "\""},
			{"Char", "!"},
			{"StringEnd", "\""},
			{"ExprEnd", "}"},
			{"StringEnd", "\""},
			{"ExprEnd", "}"},
			{"StringEnd", "\""},
		}},
		{"Reference", `"${user.name}"`, []token{
			{"String", "\""},
			{"Expr", "${"},
			{"Ident", "user"},
			{"Dot", "."},
			{"Ident", "name"},
			{"ExprEnd", "}"},
			{"StringEnd", "\""},
		}},
	}
	symbols := lexer.SymbolsByRune(lex)
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			l, err := lex.Lex(test.name, strings.NewReader(test.input))
			assert.NoError(t, err)
			tokens, err := lexer.ConsumeAll(l)
			assert.NoError(t, err)
			// Build with append rather than indexed writes into a
			// make([]token, len(tokens)-1) slice: the original panicked on
			// an empty token stream (negative make length) and silently
			// assumed EOF is always the final token — any other position
			// would leave a zero-value hole or index out of range.
			actual := make([]token, 0, len(tokens))
			for _, tok := range tokens { // "tok", not "t": don't shadow *testing.T
				if tok.Type == lexer.EOF {
					continue
				}
				actual = append(actual, token{Type: symbols[tok.Type], Value: tok.Value})
			}
			assert.Equal(t, test.expected, actual)
		})
	}
}

// TestLexerConformanceGenerated generates a lexer from conformanceLexer and
// then re-invokes "go test" with the "generated" build tag so that
// TestLexerConformanceGeneratedInternal runs against the generated code.
func TestLexerConformanceGenerated(t *testing.T) {
	genLexer(t)
	args := []string{"test", "-run", "TestLexerConformanceGeneratedInternal", "-tags", "generated"}
	// Forward any test flags that differ from their defaults (e.g. -v,
	// -run filters) to the child "go test" invocation.
	flag.CommandLine.VisitAll(func(f *flag.Flag) {
		if value := f.Value.String(); value != f.DefValue {
			args = append(args, fmt.Sprintf("-%s=%s", f.Name, value))
		}
	})
	cmd := exec.Command("go", args...)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	assert.NoError(t, cmd.Run())
}

func TestLexerConformance(t *testing.T) {
testLexer(t, conformanceLexer)
}

// genLexer serialises conformanceLexer to JSON and pipes it into the
// participle code generator, writing conformance_lexer_gen.go (build tag
// "generated") into the current package. The generated file is removed
// when the test finishes.
func genLexer(t *testing.T) {
	t.Helper()
	lexerJSON, err := json.Marshal(conformanceLexer)
	assert.NoError(t, err)
	cwd, err := os.Getwd()
	assert.NoError(t, err)
	generatedConformanceLexer := filepath.Join(cwd, "conformance_lexer_gen.go")
	t.Cleanup(func() {
		_ = os.Remove(generatedConformanceLexer)
	})
	// NOTE(review): assumes the participle binary has already been built at
	// scripts/participle (presumably by the project build) — confirm.
	cmd := exec.Command(
		"../../../scripts/participle",
		"gen", "lexer", "conformance",
		"--tags", "generated",
		"--name", "GeneratedConformance",
		"--output", generatedConformanceLexer)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	// The lexer JSON is streamed over stdin. Ordering matters: create the
	// pipe before Start, write after Start, close the pipe (to signal EOF)
	// before Wait.
	w, err := cmd.StdinPipe()
	assert.NoError(t, err)
	// Safety net: ensures the pipe is closed even if an assertion below
	// aborts the test early; the explicit Close before Wait is the normal
	// path.
	defer w.Close()
	err = cmd.Start()
	assert.NoError(t, err)
	_, err = w.Write(lexerJSON)
	assert.NoError(t, err)
	err = w.Close()
	assert.NoError(t, err)
	err = cmd.Wait()
	assert.NoError(t, err)
}

0 comments on commit fa71ac8

Please sign in to comment.