Skip to content
Snippets Groups Projects
Verified Commit fef87b22 authored by Volker Schukai
Browse files

feat: implementation of basic features

parent d3fc2bc4
No related branches found
No related tags found
No related merge requests found
...@@ -9,7 +9,7 @@ func BenchmarkTransformer(b *testing.B) { ...@@ -9,7 +9,7 @@ func BenchmarkTransformer(b *testing.B) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
tt := NewTransformer(m) tt := NewTransformer(&m)
tt.Transform("static:AtEst | strtolower | strtoupper | prefix:b | md5") tt.Transform("static:AtEst | strtolower | strtoupper | prefix:b | md5")
} }
......
...@@ -4,4 +4,6 @@ go 1.19 ...@@ -4,4 +4,6 @@ go 1.19
require github.com/volker-schukai/tokenizer v1.0.0 require github.com/volker-schukai/tokenizer v1.0.0
require gitlab.schukai.com/oss/libraries/go/utilities/pathfinder v0.4.0
replace github.com/volker-schukai/tokenizer => /home/volker.schukai/projekte/github/tokenizer replace github.com/volker-schukai/tokenizer => /home/volker.schukai/projekte/github/tokenizer
...@@ -4,5 +4,9 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ ...@@ -4,5 +4,9 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/volker-schukai/tokenizer v1.0.0 h1:wF4haFoCodq7lgAk8c+th/DZmpFpL2WVD8wDzAGU1mA= github.com/volker-schukai/tokenizer v1.0.0 h1:wF4haFoCodq7lgAk8c+th/DZmpFpL2WVD8wDzAGU1mA=
github.com/volker-schukai/tokenizer v1.0.0/go.mod h1:LPw7lLIxUnZgeg96818N7IvwLE1x8ya31J/Aa0aCq9M= github.com/volker-schukai/tokenizer v1.0.0/go.mod h1:LPw7lLIxUnZgeg96818N7IvwLE1x8ya31J/Aa0aCq9M=
gitlab.schukai.com/oss/libraries/go/utilities/pathfinder v0.3.1 h1:oyElaqEiyr2XgaE1CYwD8LoeHsuR/vQD/p6k3jYbJFs=
gitlab.schukai.com/oss/libraries/go/utilities/pathfinder v0.3.1/go.mod h1:UvdD4NAf3gLKYafabJD7e9ZCOetzM9JZ9y4GkZukPVU=
gitlab.schukai.com/oss/libraries/go/utilities/pathfinder v0.4.0 h1:eAEsq3lsHwMe5Zz71vNab5csPtp8S+i5zFPmNnDPFDg=
gitlab.schukai.com/oss/libraries/go/utilities/pathfinder v0.4.0/go.mod h1:UvdD4NAf3gLKYafabJD7e9ZCOetzM9JZ9y4GkZukPVU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
...@@ -10,6 +10,7 @@ const ( ...@@ -10,6 +10,7 @@ const (
PipeCmdStatic PipeCmdStatic
PipeCmdIndex PipeCmdIndex
PipeCmdPath
PipeCmdToUpper PipeCmdToUpper
PipeCmdToLower PipeCmdToLower
PipeCmdEmpty PipeCmdEmpty
...@@ -68,6 +69,7 @@ func initTokenizer() *tokenizer.Tokenizer { ...@@ -68,6 +69,7 @@ func initTokenizer() *tokenizer.Tokenizer {
parser.DefineTokens(PipeCmdStatic, []string{"static"}) parser.DefineTokens(PipeCmdStatic, []string{"static"})
parser.DefineTokens(PipeCmdIndex, []string{"index", "dataset"}) parser.DefineTokens(PipeCmdIndex, []string{"index", "dataset"})
parser.DefineTokens(PipeCmdPath, []string{"path"})
parser.DefineTokens(PipeReflectionIsNil, []string{"isnull", "isnil"}) parser.DefineTokens(PipeReflectionIsNil, []string{"isnull", "isnil"})
parser.DefineTokens(PipeCmdNop, []string{"nop"}) parser.DefineTokens(PipeCmdNop, []string{"nop"})
......
...@@ -10,6 +10,7 @@ import ( ...@@ -10,6 +10,7 @@ import (
"errors" "errors"
"fmt" "fmt"
"github.com/volker-schukai/tokenizer" "github.com/volker-schukai/tokenizer"
"gitlab.schukai.com/oss/libraries/go/utilities/pathfinder"
"html" "html"
"math" "math"
"net/url" "net/url"
...@@ -20,16 +21,17 @@ import ( ...@@ -20,16 +21,17 @@ import (
) )
type Transformer struct { type Transformer struct {
dataset map[any]any dataset *map[any]any
errors []error errors []error
parser *tokenizer.Tokenizer parser *tokenizer.Tokenizer
} }
type Number interface { //
int | int8 | int16 | int32 | int64 | uint | uint8 | uint16 | uint32 | uint64 //type Number interface {
} // int | int8 | int16 | int32 | int64 | uint | uint8 | uint16 | uint32 | uint64
//}
func NewTransformer(dataset map[any]any) *Transformer { func NewTransformer(dataset *map[any]any) *Transformer {
parser := initTokenizer() parser := initTokenizer()
return &Transformer{ return &Transformer{
...@@ -53,6 +55,10 @@ func (t *Transformer) Transform(pipe string) (interface{}, error) { ...@@ -53,6 +55,10 @@ func (t *Transformer) Transform(pipe string) (interface{}, error) {
return t.tokenize(pipe) return t.tokenize(pipe)
} }
// Dataset returns the pointer to the transformer's underlying dataset,
// i.e. the same *map[any]any that was handed to NewTransformer.
func (t *Transformer) Dataset() *map[any]any {
	d := t.dataset
	return d
}
func (t *Transformer) tokenize(pipe string) (any, error) { func (t *Transformer) tokenize(pipe string) (any, error) {
// create tokens stream // create tokens stream
...@@ -65,6 +71,9 @@ func (t *Transformer) tokenize(pipe string) (any, error) { ...@@ -65,6 +71,9 @@ func (t *Transformer) tokenize(pipe string) (any, error) {
// iterate over each token // iterate over each token
for stream.IsValid() { for stream.IsValid() {
token := stream.CurrentToken() token := stream.CurrentToken()
if token == nil {
break
}
stream.GoNext() stream.GoNext()
if token.Is(PipeSymbol) { if token.Is(PipeSymbol) {
...@@ -85,8 +94,13 @@ func (t *Transformer) tokenize(pipe string) (any, error) { ...@@ -85,8 +94,13 @@ func (t *Transformer) tokenize(pipe string) (any, error) {
} }
var currentValue any var currentValue any
//datasetAvailable := true
currentValue = t.dataset currentValue = t.dataset
//if reflect.ValueOf(currentValue).IsNil() {
// datasetAvailable = false
//}
var err error var err error
var ok bool var ok bool
...@@ -107,30 +121,86 @@ func (t *Transformer) tokenize(pipe string) (any, error) { ...@@ -107,30 +121,86 @@ func (t *Transformer) tokenize(pipe string) (any, error) {
continue continue
} }
if tokens[0].Is(PipeCmdIndex) { if tokens[0].Is(PipeCmdPath) {
if len(tokens) > 1 {
parts := []string{}
for _, token := range tokens[1:] {
parts = append(parts, token.ValueUnescapedString())
}
path := strings.Join(parts, "")
currentValue, err = pathfinder.GetValue[any](currentValue, path)
if err != nil {
return nil, err
}
} else {
return nil, errors.New("invalid path command")
}
continue
} else if tokens[0].Is(PipeCmdIndex) {
var index string var index string
if index, err = handleIndexCommand(tokens); err != nil { if index, err = handleIndexCommand(tokens); err != nil {
return nil, err return nil, err
} }
switch currentValue.(type) { switch currentValue.(type) {
case *map[any]any:
if reflect.ValueOf(currentValue).IsNil() {
return nil, errors.New("index command on nil map")
}
currentValue, ok = (*currentValue.(*map[any]any))[index]
if !ok {
return nil, errors.New("index " + index + " not found")
}
case map[any]any: case map[any]any:
currentValue, ok = currentValue.(map[any]any)[index] currentValue, ok = currentValue.(map[any]any)[index]
if !ok { if !ok {
t.errors = append(t.errors, errors.New("index not found: "+index)) t.errors = append(t.errors, errors.New("index not found: "+index))
} }
case *map[string]any:
if reflect.ValueOf(currentValue).IsNil() {
return nil, errors.New("index command on nil map")
}
currentValue, ok = (*currentValue.(*map[string]any))[index]
if !ok {
t.errors = append(t.errors, errors.New("index not found: "+index))
}
case map[string]any: case map[string]any:
currentValue, ok = currentValue.(map[string]any)[index] currentValue, ok = currentValue.(map[string]any)[index]
if !ok { if !ok {
t.errors = append(t.errors, errors.New("index not found: "+index)) t.errors = append(t.errors, errors.New("index not found: "+index))
} }
case *[]string:
indexInt, err := strconv.Atoi(index)
if err != nil {
return nil, errors.New("index must be an integer")
}
currentValue = (*currentValue.(*[]string))[indexInt]
case []string: case []string:
indexInt, err := strconv.Atoi(index) indexInt, err := strconv.Atoi(index)
if err != nil { if err != nil {
return nil, err return nil, err
} }
currentValue = currentValue.([]string)[indexInt] currentValue = currentValue.([]string)[indexInt]
case *[]any:
indexInt, err := strconv.Atoi(index)
if err != nil {
return nil, err
}
currentValue = (*currentValue.(*[]any))[indexInt]
case []any: case []any:
indexInt, err := strconv.Atoi(index) indexInt, err := strconv.Atoi(index)
...@@ -141,8 +211,19 @@ func (t *Transformer) tokenize(pipe string) (any, error) { ...@@ -141,8 +211,19 @@ func (t *Transformer) tokenize(pipe string) (any, error) {
if currentValue == nil { if currentValue == nil {
t.errors = append(t.errors, errors.New("index not found: "+index)) t.errors = append(t.errors, errors.New("index not found: "+index))
} }
default: default:
return nil, errors.New("invalid index command") //var value generic[currentValue]
//
//genericValue := reflect.ValueOf(currentValue)
vxx, err := pathfinder.GetValue[any](currentValue, index)
fmt.Println(vxx, err)
//case struct{}:
// return nil, errors.New("index command on struct")
//
//default:
// return nil, errors.New("unsupported type " + reflect.TypeOf(currentValue).String())
} }
continue continue
...@@ -177,16 +258,26 @@ func (t *Transformer) tokenize(pipe string) (any, error) { ...@@ -177,16 +258,26 @@ func (t *Transformer) tokenize(pipe string) (any, error) {
continue continue
case *map[any]any:
v, ok := currentValue.(*map[any]any)
if !ok {
return nil, errors.New("invalid map")
}
if currentValue, err = handleMap(tokens, v); err != nil {
return nil, err
}
case map[any]any, map[string]any: case map[any]any, map[string]any:
v, ok := currentValue.(map[any]any) v, ok := currentValue.(map[any]any)
if !ok { if !ok {
v = make(map[any]any) m := make(map[any]any)
for k, vv := range currentValue.(map[string]any) { for k, vv := range currentValue.(map[string]any) {
v[k] = vv m[k] = vv
} }
v = m
} }
if currentValue, err = handleMap(tokens, v); err != nil { if currentValue, err = handleMap(tokens, &v); err != nil {
return nil, err return nil, err
} }
continue continue
...@@ -205,7 +296,7 @@ func (t *Transformer) tokenize(pipe string) (any, error) { ...@@ -205,7 +296,7 @@ func (t *Transformer) tokenize(pipe string) (any, error) {
continue continue
default: default:
return nil, errors.New("invalid type") return nil, errors.New("the type " + reflect.TypeOf(currentValue).String() + " is not supported")
} }
return nil, errors.New("unknown command " + tokens[0].ValueUnescapedString()) return nil, errors.New("unknown command " + tokens[0].ValueUnescapedString())
...@@ -330,13 +421,13 @@ func handleFloat(tokens TokenList, currentValue float64) (any, error) { ...@@ -330,13 +421,13 @@ func handleFloat(tokens TokenList, currentValue float64) (any, error) {
return nil, errors.New(tokens[0].ValueString() + " is not a valid command for value of type float (" + strconv.FormatFloat(currentValue, 'f', -1, 64) + ")") return nil, errors.New(tokens[0].ValueString() + " is not a valid command for value of type float (" + strconv.FormatFloat(currentValue, 'f', -1, 64) + ")")
} }
func handleMap(tokens TokenList, currentValue map[any]any) (any, error) { func handleMap(tokens TokenList, currentValue *map[any]any) (any, error) {
if tokens[0].Is(PipeCmdToJSON) { if tokens[0].Is(PipeCmdToJSON) {
// convert to string map // convert to string map
stringMap := make(map[string]any) stringMap := make(map[string]any)
for k, v := range currentValue { for k, v := range *currentValue {
key, ok := k.(string) key, ok := k.(string)
if !ok { if !ok {
return nil, errors.New("invalid key type for json conversion") return nil, errors.New("invalid key type for json conversion")
......
...@@ -5,7 +5,7 @@ import ( ...@@ -5,7 +5,7 @@ import (
) )
func TestNewTransformer(t *testing.T) { func TestNewTransformer(t *testing.T) {
tt := NewTransformer(map[any]any{}) tt := NewTransformer(&map[any]any{})
if tt == nil { if tt == nil {
t.Error("Transformer is nil") t.Error("Transformer is nil")
} }
...@@ -638,7 +638,7 @@ func TestTransformValues(t *testing.T) { ...@@ -638,7 +638,7 @@ func TestTransformValues(t *testing.T) {
t.Run(td.instruction, func(t *testing.T) { t.Run(td.instruction, func(t *testing.T) {
tt := NewTransformer(td.data) tt := NewTransformer(&td.data)
result, err := tt.Transform(td.instruction) result, err := tt.Transform(td.instruction)
if td.hasError { if td.hasError {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment