From c83a414c6fef3bafb97fee731747cee1866db7e2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Micha=C5=82=20Kowal?=
Date: Mon, 18 Mar 2024 20:54:46 +0100
Subject: [PATCH] #12 extend tokenizer, add unit tests for tokenizer

---
 internal/rule/expression_tree.go              |   2 +-
 internal/rule/{expression.go => node.go}      |   0
 .../rule/{expression_test.go => node_test.go} |   0
 internal/rule/tokenizer.go                    | 180 ++++++++++++++------
 internal/rule/tokenizer_test.go               | 110 +++++++++++++++
 5 files changed, 254 insertions(+), 38 deletions(-)
 rename internal/rule/{expression.go => node.go} (100%)
 rename internal/rule/{expression_test.go => node_test.go} (100%)
 create mode 100644 internal/rule/tokenizer_test.go

diff --git a/internal/rule/expression_tree.go b/internal/rule/expression_tree.go
index 59bee1f..862107a 100644
--- a/internal/rule/expression_tree.go
+++ b/internal/rule/expression_tree.go
@@ -21,7 +21,7 @@ func newCustomRulesTokenizer() *expressionTreeBuilder {
 }
 
 func (c *expressionTreeBuilder) BuildExpressionTree(variable, expression string) (expressionTree, error) {
-	tokens, err := c.tokenizer.BuildTokens(variable, expression)
+	_, err := c.tokenizer.BuildTokens(variable, expression)
 	if err != nil {
 		return nil, fmt.Errorf("build tokens: %w", err)
 	}
diff --git a/internal/rule/expression.go b/internal/rule/node.go
similarity index 100%
rename from internal/rule/expression.go
rename to internal/rule/node.go
diff --git a/internal/rule/expression_test.go b/internal/rule/node_test.go
similarity index 100%
rename from internal/rule/expression_test.go
rename to internal/rule/node_test.go
diff --git a/internal/rule/tokenizer.go b/internal/rule/tokenizer.go
index 415ccd9..34d486f 100644
--- a/internal/rule/tokenizer.go
+++ b/internal/rule/tokenizer.go
@@ -1,46 +1,29 @@
 package rule
 
-const (
-	operatorEq    = "=="
-	operatorNotEq = "!="
-	operatorGt    = ">"
-	operatorLs    = "<"
-	operatorArrow = "=>"
-
-	methodLength = "LEN"
-	methodFormat = "FORMAT"
-
-	fieldPayload = "payload"
-	fieldHeaders = "headers"
-
-	tokenVariable = "variable"
-)
+import "unicode"
 
 var (
-	// operators are compilable operators.
-	operators = []string{
-		operatorEq,
-		operatorNotEq,
-		operatorGt,
-		operatorLs,
-		operatorArrow,
-	}
+	tokenVariable        = "var"
+	tokenFunction        = "func"
+	tokenLParen          = "("
+	tokenRParen          = ")"
+	tokenDot             = "."
+	tokenDoubleAmpersand = "&&"
+	tokenMoreThan        = ">"
+	tokenLessThan        = "<"
+	tokenNumber          = "token_number"
+	tokenField           = "field"
 
-	// methods are compilable methods.
-	methods = []string{
-		methodLength,
+	methodLen    = "LEN"
+	methodFormat = "FORMAT"
+	methods      = []string{
+		methodLen,
 		methodFormat,
 	}
-
-	// fields are compilable fields available in the request wrapper.
-	fields = []string{
-		fieldPayload,
-		fieldHeaders,
-	}
 )
 
 type Tokenizer interface {
-	BuildTokens(variable, expression string) ([]Token, error)
+	BuildTokens(variable string, expression string) ([]Token, error)
 }
 
 type Token struct {
@@ -51,8 +34,131 @@ type Token struct {
 type tokenizer struct {
 }
 
-func (t *tokenizer) BuildTokens(variable, expression string) ([]Token, error) {
-	for _, r := range []rune(expression) {
-	}
-	return nil, nil
+func (t *tokenizer) BuildTokens(variable string, expression string) ([]Token, error) {
+	var (
+		tokens []Token
+		match  []rune
+	)
+
+	runeExpression := []rune(expression)
+
+	for i, r := range runeExpression {
+		// Whitespace separates tokens but is not itself a token.
+		if r == ' ' {
+			continue
+		}
+
+		match = append(match, r)
+		strMatch := string(match)
+
+		if v, ok := getFunction(match); ok {
+			tokens = append(tokens, Token{
+				Name:  tokenFunction,
+				Value: v,
+			})
+
+			match = []rune{}
+			continue
+		}
+
+		if isVariable(match, variable) && i+1 < len(runeExpression) && runeExpression[i+1] == '.' {
+			tokens = append(tokens, Token{
+				Name:  tokenVariable,
+				Value: strMatch,
+			})
+
+			match = []rune{}
+			continue
+		}
+
+		if v, ok := getSpecialCharacter(match); ok {
+			tokens = append(tokens, Token{
+				Name:  v,
+				Value: v,
+			})
+
+			match = []rune{}
+			continue
+		}
+
+		// A run of digits ends when the next rune is not a digit or the input ends.
+		if isNumber(match) && (i+1 >= len(runeExpression) || !unicode.IsDigit(runeExpression[i+1])) {
+			tokens = append(tokens, Token{
+				Name:  tokenNumber,
+				Value: strMatch,
+			})
+
+			match = []rune{}
+			continue
+		}
+
+		// An identifier directly before a closing parenthesis is a field,
+		// e.g. "payload" in "LEN(p.payload)".
+		if isIdentifier(match) && i+1 < len(runeExpression) && runeExpression[i+1] == ')' {
+			tokens = append(tokens, Token{
+				Name:  tokenField,
+				Value: strMatch,
+			})
+
+			match = []rune{}
+			continue
+		}
+	}
+
+	return tokens, nil
+}
+
+func getFunction(match []rune) (string, bool) {
+	strMatch := string(match)
+	for _, method := range methods {
+		if method == strMatch {
+			return method, true
+		}
+	}
+
+	return "", false
+}
+
+func isVariable(match []rune, variable string) bool {
+	return string(match) == variable
+}
+
+// getSpecialCharacter matches the one- and two-rune tokens; for these the
+// token name and value are identical.
+func getSpecialCharacter(match []rune) (string, bool) {
+	strMatch := string(match)
+	switch strMatch {
+	case tokenLParen, tokenRParen, tokenDot, tokenMoreThan, tokenLessThan, tokenDoubleAmpersand:
+		return strMatch, true
+	}
+
+	return "", false
+}
+
+func isNumber(match []rune) bool {
+	if len(match) == 0 {
+		return false
+	}
+
+	for _, r := range match {
+		if !unicode.IsDigit(r) {
+			return false
+		}
+	}
+
+	return true
+}
+
+func isIdentifier(match []rune) bool {
+	if len(match) == 0 {
+		return false
+	}
+
+	for _, r := range match {
+		if !unicode.IsLetter(r) && !unicode.IsDigit(r) && r != '_' {
+			return false
+		}
+	}
+
+	return true
 }
diff --git a/internal/rule/tokenizer_test.go b/internal/rule/tokenizer_test.go
new file mode 100644
index 0000000..c05022c
--- /dev/null
+++ b/internal/rule/tokenizer_test.go
@@ -0,0 +1,110 @@
+package rule
+
+import (
+	"reflect"
+	"testing"
+)
+
+func Test_tokenizer_BuildTokens(t *testing.T) {
+	type args struct {
+		variable   string
+		expression string
+	}
+	tests := []struct {
+		name    string
+		args    args
+		want    []Token
+		wantErr bool
+	}{
+		{
+			name: "slice of tokens returned",
+			args: args{
+				variable:   "p",
+				expression: "LEN(p.payload) > 0 && LEN(p.headers) > 0",
+			},
+			want: []Token{
+				{
+					Name:  tokenFunction,
+					Value: "LEN",
+				},
+				{
+					Name:  tokenLParen,
+					Value: tokenLParen,
+				},
+				{
+					Name:  tokenVariable,
+					Value: "p",
+				},
+				{
+					Name:  tokenDot,
+					Value: tokenDot,
+				},
+				{
+					Name:  tokenField,
+					Value: "payload",
+				},
+				{
+					Name:  tokenRParen,
+					Value: tokenRParen,
+				},
+				{
+					Name:  tokenMoreThan,
+					Value: ">",
+				},
+				{
+					Name:  tokenNumber,
+					Value: "0",
+				},
+				{
+					Name:  tokenDoubleAmpersand,
+					Value: "&&",
+				},
+				{
+					Name:  tokenFunction,
+					Value: "LEN",
+				},
+				{
+					Name:  tokenLParen,
+					Value: tokenLParen,
+				},
+				{
+					Name:  tokenVariable,
+					Value: "p",
+				},
+				{
+					Name:  tokenDot,
+					Value: tokenDot,
+				},
+				{
+					Name:  tokenField,
+					Value: "headers",
+				},
+				{
+					Name:  tokenRParen,
+					Value: tokenRParen,
+				},
+				{
+					Name:  tokenMoreThan,
+					Value: ">",
+				},
+				{
+					Name:  tokenNumber,
+					Value: "0",
+				},
+			},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			tok := &tokenizer{}
+			got, err := tok.BuildTokens(tt.args.variable, tt.args.expression)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("BuildTokens() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("BuildTokens() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}