Skip to content

Refactor idea to simplify interfaces #95

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Jun 11, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 1 addition & 3 deletions analyze_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2110,9 +2110,7 @@ func TestAnalyze_lua(t *testing.T) {
err := analyze("nginx.conf", tc.stmt, ";", tc.ctx, &ParseOptions{
MatchFuncs: []MatchFunc{MatchLua},
LexOptions: LexOptions{
ExternalLexers: []Lexer{
&Lua{},
},
Lexers: []RegisterLexer{lua.RegisterLexer()},
},
})

Expand Down
66 changes: 45 additions & 21 deletions build.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,23 +18,45 @@ import (
)

type BuildOptions struct {
Indent int
Tabs bool
Header bool
ExternalBuilds []Builder // handle specific directives
Indent int
Tabs bool
Header bool
Builders []RegisterBuilder // handle specific directives
extBuilders map[string]Builder
}

// RegisterBuilder is an option that can be used to add a builder to build NGINX configuration for custom directives.
// Values are created with BuildWithBuilder and passed in BuildOptions.Builders,
// where Build and BuildFiles apply them before rendering.
type RegisterBuilder interface {
	// applyBuildOptions merges this registration into the given BuildOptions.
	applyBuildOptions(options *BuildOptions)
}

// registerBuilder is the RegisterBuilder implementation returned by
// BuildWithBuilder; it pairs a Builder with the directives it handles.
type registerBuilder struct {
	b          Builder  // builder invoked for the directives below
	directives []string // directive names that b can build
}

// applyBuildOptions registers the wrapped Builder under each of its directive
// names in the options' lookup table, allocating the table on first use.
func (rb registerBuilder) applyBuildOptions(o *BuildOptions) {
	if o.extBuilders == nil {
		o.extBuilders = make(map[string]Builder, len(rb.directives))
	}

	for _, directive := range rb.directives {
		o.extBuilders[directive] = rb.b
	}
}

// BuildWithBuilder registers a builder to build the NGINX configuration for the given directives.
func BuildWithBuilder(b Builder, directives ...string) RegisterBuilder {
	rb := registerBuilder{
		b:          b,
		directives: directives,
	}
	return rb
}

// Builder is the interface implemented by types that can render a Directive
// as it appears in NGINX configuration files.
//
// RegisterBuilder returns the names of the directives for which the builder can
// build NGINX configuration.
//
// Build writes the strings that represent the Directive and its Block to the
// io.StringWriter returning any error encountered that caused the write to stop
// early. Build must not modify the Directive.
type Builder interface {
RegisterBuilder() []string
Build(stmt *Directive) string
}

Expand Down Expand Up @@ -63,6 +85,10 @@ func BuildFiles(payload Payload, dir string, options *BuildOptions) error {
dir = cwd
}

for _, o := range options.Builders {
o.applyBuildOptions(options)
}

for _, config := range payload.Config {
path := config.File
if !filepath.IsAbs(path) {
Expand Down Expand Up @@ -111,6 +137,12 @@ func Build(w io.Writer, config Config, options *BuildOptions) error {
}
}

if options.extBuilders == nil { // might be set if using BuildFiles
for _, o := range options.Builders {
o.applyBuildOptions(options)
}
}

body := strings.Builder{}
buildBlock(&body, nil, config.Parsed, 0, 0, options)

Expand All @@ -123,7 +155,7 @@ func Build(w io.Writer, config Config, options *BuildOptions) error {
return err
}

//nolint:funlen,gocognit
//nolint:gocognit
func buildBlock(sb io.StringWriter, parent *Directive, block Directives, depth int, lastLine int, options *BuildOptions) {
for i, stmt := range block {
// if this statement is a comment on the same line as the previous one, do not emit EOL for this stmt
Expand All @@ -147,18 +179,10 @@ func buildBlock(sb io.StringWriter, parent *Directive, block Directives, depth i
directive := Enquote(stmt.Directive)
_, _ = sb.WriteString(directive)

if options.ExternalBuilds != nil {
extDirectivesMap := make(map[string]Builder)
for _, ext := range options.ExternalBuilds {
directives := ext.RegisterBuilder()
for _, d := range directives {
extDirectivesMap[d] = ext
}

if ext, ok := extDirectivesMap[directive]; ok {
_, _ = sb.WriteString(" ") // space between directives and arguments
_, _ = sb.WriteString(ext.Build(stmt))
}
if options.extBuilders != nil {
if ext, ok := options.extBuilders[directive]; ok {
_, _ = sb.WriteString(" ") // space between directives and arguments
_, _ = sb.WriteString(ext.Build(stmt))
}
} else {
// special handling for if statements
Expand Down
4 changes: 2 additions & 2 deletions build_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ var buildFixtures = []buildFixture{
},
{
name: "lua block",
options: BuildOptions{ExternalBuilds: []Builder{&Lua{}}},
options: BuildOptions{Builders: []RegisterBuilder{lua.RegisterBuilder()}},
parsed: Directives{
{
Directive: "content_by_lua_block",
Expand All @@ -273,7 +273,7 @@ var buildFixtures = []buildFixture{
},
{
name: "set_by_lua_block",
options: BuildOptions{ExternalBuilds: []Builder{&Lua{}}},
options: BuildOptions{Builders: []RegisterBuilder{lua.RegisterBuilder()}},
parsed: Directives{
{
Directive: "set_by_lua_block",
Expand Down
62 changes: 36 additions & 26 deletions lex.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,27 +46,53 @@ func SetTokenChanCap(size int) {

// Lexer is an interface for implementing lexers that handle external NGINX tokens during the lexical analysis phase.
type Lexer interface {
// RegisterLexer registers an external lexer with a given SubScanner.
// This method integrates the external lexer into the lexical analysis process,
// enabling it to handle external token scanning. It returns a slice of strings
// representing the tokens that the external lexer can recognize.
RegisterLexer(scanner *SubScanner) []string
// Lex processes a matched token and returns a channel of NgxToken objects.
// This method performs lexical analysis on the matched token and produces a stream of tokens for the parser to consume.
// The external lexer should close the channel once it has completed lexing the input to signal the end of tokens.
// Failure to close the channel will cause the receiver to wait indefinitely.
Lex(matchedToken string) <-chan NgxToken
Lex(s *SubScanner, matchedToken string) <-chan NgxToken
}

// LexOptions allows customization of the lexing process by specifying external lexers
// that can handle specific directives. By registering interest in particular directives,
// external lexers can ensure that these directives are processed separately
// from the general lexical analysis logic.
type LexOptions struct {
ExternalLexers []Lexer
Lexers []RegisterLexer
extLexers map[string]Lexer
}

// RegisterLexer is an option that can be used to add a lexer to tokenize external NGINX tokens.
// Values are created with LexWithLexer and passed in LexOptions.Lexers,
// where LexWithOptions applies them before tokenizing.
type RegisterLexer interface {
	// applyLexOptions merges this registration into the given LexOptions.
	applyLexOptions(options *LexOptions)
}

// registerLexer is the RegisterLexer implementation returned by LexWithLexer;
// it pairs a Lexer with the tokens that hand control to it.
type registerLexer struct {
	l            Lexer    // lexer invoked when one of the tokens below is seen
	stringTokens []string // tokens that trigger l during lexing
}

// applyLexOptions registers the wrapped Lexer under each of its trigger tokens
// in the options' lookup table, allocating the table on first use.
func (rl registerLexer) applyLexOptions(o *LexOptions) {
	if o.extLexers == nil {
		o.extLexers = make(map[string]Lexer, len(rl.stringTokens))
	}

	for _, tok := range rl.stringTokens {
		o.extLexers[tok] = rl.l
	}
}

// LexWithLexer registers a Lexer that implements tokenization of an NGINX configuration after one of the given
// stringTokens is encountered by Lex.
func LexWithLexer(l Lexer, stringTokens ...string) RegisterLexer {
	rl := registerLexer{
		l:            l,
		stringTokens: stringTokens,
	}
	return rl
}

func LexWithOptions(r io.Reader, options LexOptions) chan NgxToken {
for _, o := range options.Lexers {
o.applyLexOptions(&options)
}

tc := make(chan NgxToken, tokChanCap)
go tokenize(r, tc, options)
return tc
Expand Down Expand Up @@ -119,22 +145,6 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
lexState = skipSpace
}

var externalLexers map[string]Lexer
var externalScanner *SubScanner
for _, ext := range options.ExternalLexers {
if externalLexers == nil {
externalLexers = make(map[string]Lexer)
}

if externalScanner == nil {
externalScanner = &SubScanner{scanner: scanner, tokenLine: tokenLine}
}

for _, d := range ext.RegisterLexer(externalScanner) {
externalLexers[d] = ext
}
}

for {
if readNext {
if !scanner.Scan() {
Expand Down Expand Up @@ -167,13 +177,13 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
if token.Len() > 0 {
tokenStr := token.String()
if nextTokenIsDirective {
if ext, ok := externalLexers[tokenStr]; ok {
if ext, ok := options.extLexers[tokenStr]; ok {
// saving lex state before emitting tokenStr to know if we encountered start quote
lastLexState := lexState
emit(tokenStartLine, lexState == inQuote, nil)

externalScanner.tokenLine = tokenLine
extTokenCh := ext.Lex(tokenStr)
externalScanner := &SubScanner{scanner: scanner, tokenLine: tokenLine}
extTokenCh := ext.Lex(externalScanner, tokenStr)
for tok := range extTokenCh {
tokenCh <- tok
}
Expand Down
6 changes: 3 additions & 3 deletions lex_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -429,10 +429,10 @@ func TestLex(t *testing.T) {
t.Fatal(err)
}
defer file.Close()

lua := &Lua{}
options := LexOptions{
ExternalLexers: []Lexer{
&Lua{},
},
Lexers: []RegisterLexer{lua.RegisterLexer()},
}
i := 0

Expand Down
47 changes: 22 additions & 25 deletions lua.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,7 @@ import (
"strings"
)

type Lua struct {
s *SubScanner
}
type Lua struct{}

func (l *Lua) directiveNames() []string {
return []string{
Expand All @@ -30,13 +28,12 @@ func (l *Lua) directiveNames() []string {
}
}

func (l *Lua) RegisterLexer(s *SubScanner) []string {
l.s = s
return l.directiveNames()
func (l *Lua) RegisterLexer() RegisterLexer {
return LexWithLexer(l, l.directiveNames()...)
}

//nolint:funlen,gocognit,gocyclo,nosec
func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
func (l *Lua) Lex(s *SubScanner, matchedToken string) <-chan NgxToken {
tokenCh := make(chan NgxToken)

tokenDepth := 0
Expand All @@ -51,21 +48,21 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
if matchedToken == "set_by_lua_block" /* #nosec G101 */ {
arg := ""
for {
if !l.s.Scan() {
if !s.Scan() {
return
}
next := l.s.Text()
next := s.Text()
if isSpace(next) {
if arg != "" {
tokenCh <- NgxToken{Value: arg, Line: l.s.Line(), IsQuoted: false}
tokenCh <- NgxToken{Value: arg, Line: s.Line(), IsQuoted: false}
break
}

for isSpace(next) {
if !l.s.Scan() {
if !s.Scan() {
return
}
next = l.s.Text()
next = s.Text()
}
}
arg += next
Expand All @@ -74,14 +71,14 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {

// check that Lua block starts correctly
for {
if !l.s.Scan() {
if !s.Scan() {
return
}
next := l.s.Text()
next := s.Text()

if !isSpace(next) {
if next != "{" {
lineno := l.s.Line()
lineno := s.Line()
tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: `expected "{" to start lua block`, Line: &lineno}}
return
}
Expand All @@ -92,13 +89,13 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {

// Grab everything in Lua block as a single token and watch for curly brace '{' in strings
for {
if !l.s.Scan() {
if !s.Scan() {
return
}

next := l.s.Text()
if err := l.s.Err(); err != nil {
lineno := l.s.Line()
next := s.Text()
if err := s.Err(); err != nil {
lineno := s.Line()
tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: err.Error(), Line: &lineno}}
}

Expand All @@ -112,7 +109,7 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
case next == "}" && !inQuotes:
tokenDepth--
if tokenDepth < 0 {
lineno := l.s.Line()
lineno := s.Line()
tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: `unexpected "}"`, Line: &lineno}}
return
}
Expand All @@ -122,8 +119,8 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
}

if tokenDepth == 0 {
tokenCh <- NgxToken{Value: tok.String(), Line: l.s.Line(), IsQuoted: true}
tokenCh <- NgxToken{Value: ";", Line: l.s.Line(), IsQuoted: false} // For an end to the Lua string based on the nginx bahavior
tokenCh <- NgxToken{Value: tok.String(), Line: s.Line(), IsQuoted: true}
tokenCh <- NgxToken{Value: ";", Line: s.Line(), IsQuoted: false} // For an end to the Lua string based on the nginx behavior
// See: https://github.com/nginxinc/crossplane/blob/master/crossplane/ext/lua.py#L122C25-L122C41
return
}
Expand All @@ -142,7 +139,7 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {

// strictly check that the first non-space character is {
if tokenDepth == 0 {
tokenCh <- NgxToken{Value: next, Line: l.s.Line(), IsQuoted: false}
tokenCh <- NgxToken{Value: next, Line: s.Line(), IsQuoted: false}
return
}
tok.WriteString(next)
Expand All @@ -153,8 +150,8 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
return tokenCh
}

func (l *Lua) RegisterBuilder() []string {
return l.directiveNames()
func (l *Lua) RegisterBuilder() RegisterBuilder {
return BuildWithBuilder(l, l.directiveNames()...)
}

func (l *Lua) Build(stmt *Directive) string {
Expand Down
Loading