diff --git a/cmd/main b/cmd/main
deleted file mode 100755
index 99d34607e940f0d2a47bf5863e227e4371452568..0000000000000000000000000000000000000000
Binary files a/cmd/main and /dev/null differ
diff --git a/cmd/main.go b/cmd/main.go
deleted file mode 100644
index 2f6995dc697eb0f78c79bd8801b56e46ba7ce464..0000000000000000000000000000000000000000
--- a/cmd/main.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"gitlab.schukai.com/oss/libraries/go/utilities/data.git"
-)
-
-func main() {
-
-	m := map[any]any{}
-
-	for i := 0; i < 100000; i++ {
-
-		tt := data.NewTransformer(m)
-		c, _ := tt.Transform("static:AtEst | strtolower | strtoupper | prefix:b | md5")
-		fmt.Println(c)
-
-	}
-
-}
diff --git a/cpu.prof b/cpu.prof
deleted file mode 100644
index 441b340a108e8f0a24f84f6ffa0b94faf5b5309e..0000000000000000000000000000000000000000
Binary files a/cpu.prof and /dev/null differ
diff --git a/go.mod b/go.mod
index 78754190865f2cb3522a432db4309150e2e00431..84ae72ed1d7e6de4c723765b84b6613f4d116096 100644
--- a/go.mod
+++ b/go.mod
@@ -3,3 +3,5 @@ module gitlab.schukai.com/oss/libraries/go/utilities/data.git
 go 1.19
 
 require github.com/volker-schukai/tokenizer v1.0.0
+
+replace github.com/volker-schukai/tokenizer => /home/volker.schukai/projekte/github/tokenizer
diff --git a/transformer.go b/transformer.go
index 17196f4c704c4d9734ae29c96ecd677eae45b254..59de1b01051d1bc547ec9a333a0540ae3ddaffa1 100644
--- a/transformer.go
+++ b/transformer.go
@@ -22,6 +22,7 @@ import (
 type Transformer struct {
 	dataset map[any]any
 	errors  []error
+	parser  *tokenizer.Tokenizer
 }
 
 type Number interface {
@@ -29,8 +30,11 @@
 }
 
 func NewTransformer(dataset map[any]any) *Transformer {
+	parser := initTokenizer()
+
 	return &Transformer{
 		dataset: dataset,
+		parser:  parser,
 	}
}
 
@@ -51,10 +55,8 @@ func (t *Transformer) Transform(pipe string) (interface{}, error) {
 
 func (t *Transformer) tokenize(pipe string) (any, error) {
 
-	parser := initTokenizer()
-
 	// create tokens stream
-	stream := parser.ParseString(pipe)
+	stream := t.parser.ParseString(pipe)
 	defer stream.Close()
 
 	tokenMap := make([]TokenList, 0)
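
The transformer.go hunks above move tokenizer construction out of the per-call path: initTokenizer() now runs once in NewTransformer, and tokenize reuses the cached t.parser instead of rebuilding the tokenizer on every call. The deleted cmd/main.go (together with the cmd/main binary and cpu.prof) was a throwaway profiling harness that exercised exactly this hot path. A minimal benchmark sketch of the intended win, not part of this diff, assuming the package is named data (as the deleted harness's data.NewTransformer call suggests) and reusing the pipe string from that harness:

package data

import "testing"

// BenchmarkTransformReuse constructs one Transformer and calls Transform
// repeatedly, so the tokenizer cached in t.parser is built once rather than
// once per call — which is what the transformer.go change above enables.
func BenchmarkTransformReuse(b *testing.B) {
	t := NewTransformer(map[any]any{}) // initTokenizer() now runs here, once
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = t.Transform("static:AtEst | strtolower | strtoupper | prefix:b | md5")
	}
}

Note that the deleted harness created a fresh Transformer on every loop iteration; after this change that pattern still pays the initialization cost each time, so the caching only pays off when a single Transformer is reused across Transform calls.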