Skip to content

Commit

Permalink
#12 add more code to tokenizer, add unit tests to tokenizer
Browse files Browse the repository at this point in the history
  • Loading branch information
thegodenage committed Mar 18, 2024
1 parent c6a83b4 commit c83a414
Show file tree
Hide file tree
Showing 5 changed files with 191 additions and 36 deletions.
2 changes: 1 addition & 1 deletion internal/rule/expression_tree.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ func newCustomRulesTokenizer() *expressionTreeBuilder {
}

func (c *expressionTreeBuilder) BuildExpressionTree(variable, expression string) (expressionTree, error) {
tokens, err := c.tokenizer.BuildTokens(variable, expression)
_, err := c.tokenizer.BuildTokens(variable, expression)
if err != nil {
return nil, fmt.Errorf("build tokens: %w", err)
}
Expand Down
File renamed without changes.
File renamed without changes.
115 changes: 80 additions & 35 deletions internal/rule/tokenizer.go
Original file line number Diff line number Diff line change
@@ -1,46 +1,29 @@
package rule

const (
operatorEq = "=="
operatorNotEq = "!="
operatorGt = ">"
operatorLs = "<"
operatorArrow = "=>"

methodLength = "LEN"
methodFormat = "FORMAT"

fieldPayload = "payload"
fieldHeaders = "headers"

tokenVariable = "variable"
)
import "fmt"

var (
// operators are compilable operators.
operators = []string{
operatorEq,
operatorNotEq,
operatorGt,
operatorLs,
operatorArrow,
}
tokenVariable = "var"
tokenFunction = "func"
tokenLParen = "("
tokenRParen = ")"
tokenDot = "."
tokenDoubleAmpersand = "&&"
tokenMoreThan = ">"
tokenLessThan = "<"
tokenNumber = "token_number"
tokenField = "field"

// methods are compilable methods.
methods = []string{
methodLength,
methodLen = "LEN"
methodFormat = "FORMAT"
methods = []string{
methodLen,
methodFormat,
}

// fields are compilable fields available in the request wrapper.
fields = []string{
fieldPayload,
fieldHeaders,
}
)

type Tokenizer interface {
BuildTokens(variable, expression string) ([]Token, error)
BuildTokens(variable string, expression string) ([]Token, error)
}

type Token struct {
Expand All @@ -51,8 +34,70 @@ type Token struct {
type tokenizer struct {
}

func (t *tokenizer) BuildTokens(variable, expression string) ([]Token, error) {
for _, r := range []rune(expression) {
// BuildTokens scans expression one rune at a time, growing a candidate
// match and emitting a Token whenever the candidate is recognized as a
// method name (see getFunction), the bound variable followed by '.', or
// a dot separator. Unrecognized runes stay in match and are dropped at
// end of input.
//
// NOTE(review): tokens for parentheses, fields, numbers, ">" and "&&"
// are not produced yet — the tokenizer is visibly incomplete relative
// to its unit test.
func (t *tokenizer) BuildTokens(variable string, expression string) ([]Token, error) {
	var (
		tokens []Token
		match  []rune
	)

	runeExpression := []rune(expression)

	for i, r := range runeExpression {
		match = append(match, r)

		if v, ok := getFunction(match); ok {
			tokens = append(tokens, Token{
				Name:  tokenFunction,
				Value: v,
			})

			match = match[:0]
			continue
		}

		// Only treat the match as the variable when it is immediately
		// followed by '.', e.g. "p.payload".
		// FIX: bound must be i+1 < len(runeExpression). The previous
		// check (len(expression) >= i+1) compared the BYTE length of
		// expression against a RUNE index and still allowed i+1 == len,
		// so runeExpression[i+1] panicked with index out of range when
		// the variable was the final token.
		if isVariable(match, variable) && i+1 < len(runeExpression) && runeExpression[i+1] == '.' {
			tokens = append(tokens, Token{
				Name:  tokenVariable,
				Value: string(match),
			})

			match = match[:0]
			continue
		}

		if string(match) == tokenDot {
			tokens = append(tokens, Token{
				Name:  tokenDot,
				Value: tokenDot,
			})

			match = match[:0]
			continue
		}
	}

	return tokens, nil
}

// getFunction reports whether the accumulated runes spell one of the
// compilable method names. On success it returns the canonical method
// string and true; otherwise it returns "" and false.
func getFunction(match []rune) (string, bool) {
	candidate := string(match)

	for _, method := range methods {
		if candidate == method {
			return method, true
		}
	}

	return "", false
}

// isVariable reports whether the accumulated runes spell exactly the
// expression's bound variable name.
//
// FIX: the previous body compared against fmt.Sprintf("%s", variable),
// a redundant identity conversion of a string (staticcheck S1025); a
// direct comparison is equivalent. If this was fmt's only use in the
// file, drop the now-unused `import "fmt"` as well.
func isVariable(match []rune, variable string) bool {
	return string(match) == variable
}

// getSpecialCharacter is a placeholder for recognizing single-character
// tokens ("(", ")", ">", "<", ...) — presumably the next tokenizer step;
// confirm intended signature before implementing.
//
// FIX: the declaration had no body, which does not compile ("missing
// function body" — see the CI failure on this line). An explicit empty
// stub keeps the package building until the real implementation lands.
func getSpecialCharacter() {}

Check failure on line 103 in internal/rule/tokenizer.go

View workflow job for this annotation

GitHub Actions / build

missing function body
110 changes: 110 additions & 0 deletions internal/rule/tokenizer_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
package rule

import (
"reflect"
"testing"
)

// Test_tokenizer_BuildTokens is a table-driven test describing the full
// token stream expected for a compound expression.
//
// NOTE(review): the visible tokenizer only emits function, variable and
// dot tokens, so the paren/field/number/operator expectations below
// document intended behavior that is not implemented yet — verify
// against the current tokenizer before relying on this passing.
func Test_tokenizer_BuildTokens(t *testing.T) {
	type args struct {
		variable   string
		expression string
	}

	testCases := []struct {
		name    string
		args    args
		want    []Token
		wantErr bool
	}{
		{
			name: "slice of tokens returned",
			args: args{
				variable:   "p",
				expression: "LEN(p.payload) > 0 && LEN(p.headers) > 0",
			},
			want: []Token{
				{Name: tokenFunction, Value: "LEN"},
				{Name: tokenLParen, Value: tokenLParen},
				{Name: tokenVariable, Value: "p"},
				{Name: tokenDot, Value: tokenDot},
				{Name: tokenField, Value: "payload"},
				{Name: tokenRParen, Value: tokenRParen},
				{Name: tokenMoreThan, Value: ">"},
				{Name: tokenNumber, Value: "0"},
				{Name: tokenDoubleAmpersand, Value: "&&"},
				{Name: tokenFunction, Value: "LEN"},
				{Name: tokenLParen, Value: tokenLParen},
				{Name: tokenVariable, Value: "p"},
				{Name: tokenDot, Value: tokenDot},
				{Name: tokenField, Value: "headers"},
				{Name: tokenRParen, Value: tokenRParen},
				{Name: tokenMoreThan, Value: ">"},
				{Name: tokenNumber, Value: "0"},
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			tok := &tokenizer{}

			got, err := tok.BuildTokens(tc.args.variable, tc.args.expression)
			if (err != nil) != tc.wantErr {
				t.Errorf("BuildTokens() error = %v, wantErr %v", err, tc.wantErr)
				return
			}

			if !reflect.DeepEqual(got, tc.want) {
				t.Errorf("BuildTokens() got = %v, want %v", got, tc.want)
			}
		})
	}
}

0 comments on commit c83a414

Please sign in to comment.