diff --git a/Basic_test.go b/Basic_test.go index 9400d63..a620286 100644 --- a/Basic_test.go +++ b/Basic_test.go @@ -1,149 +1,26 @@ package jsonschematics import ( - "encoding/json" + v2 "github.com/ashbeelghouri/jsonschematics/data/v2" + "github.com/ashbeelghouri/jsonschematics/utils" "log" + "os" "testing" - "time" ) -func TestForObjData(t *testing.T) { - fnTimeStart := time.Now() - var schema Schematics - err := schema.LoadSchemaFromFile("json/schema.json") - schema.Logging.PrintErrorLogs = true - schema.Logging.PrintDebugLogs = true +func TestV2Validate(t *testing.T) { + schematics, err := v2.LoadJsonSchemaFile("test-data/schema/direct/v2/example-1.json") if err != nil { t.Error(err) } - data, err := GetJson("json/data.json") + content, err := os.ReadFile("test-data/data/direct/v2/example-2.json") if err != nil { t.Error(err) } - start := time.Now() - errs := schema.Validate(data) - log.Printf("[SINGLE OBJ] Validation Time: %v", time.Since(start)) - log.Print("[SINGLE OBJ] have single errors: ", errs.HaveSingleError("", "")) - errorsFromValidate, err := json.Marshal(errs) - if err != nil { - log.Fatalf("err: %v", err) - } - log.Println("[SINGLE OBJ] errorsFromValidate: ", string(errorsFromValidate)) - start = time.Now() - newData := schema.Operate(data) - log.Printf("[SINGLE OBJ] Operaions Time: %v", time.Since(start)) - log.Printf("[SINGLE OBJ] Updated DATA: %v", newData) - - log.Printf("[SINGLE OBJ] total time taken: %v", time.Since(fnTimeStart)) - log.Println("-------------------------------------------") -} - -func TestForArrayData(t *testing.T) { - fnTimeStart := time.Now() - var schema1 Schematics - schema1.Logging.PrintErrorLogs = true - schema1.Logging.PrintDebugLogs = true - err := schema1.LoadSchemaFromFile("json/schema.json") - schema1.ArrayIdKey = "user.id" - if err != nil { - t.Error(err) - } - data, err := GetJson("json/arr-data.json") - if err != nil { - t.Error(err) - } - start := time.Now() - errs := schema1.Validate(data) - log.Printf("[ARRAY OF OBJ] Validation Time: %v", time.Since(start)) - if errs != nil { - obj, err := json.Marshal(errs) - if err != nil { - log.Fatalf("err: %v", err) - } - log.Printf("array validations >>>> %v", string(obj)) - } else { - start = time.Now() - newData := schema1.Operate(data) - log.Printf("[ARRAY OF OBJ] Operation Time: %v", time.Since(start)) - log.Printf("[ARRAY OF OBJ] Updated Data: %v", newData) - } - log.Printf("[ARRAY OF OBJ] total time taken: %v", time.Since(fnTimeStart)) - log.Println("-------------------------------------------") -} - -func TestNestedArrays(t *testing.T) { - fnTimeStart := time.Now() - var schema Schematics - schema.Logging.PrintErrorLogs = true - schema.Logging.PrintDebugLogs = true - err := schema.LoadSchemaFromFile("json/arr-inside-obj-schema.json") + jsonData, err := utils.BytesToMap(content) if err != nil { t.Error(err) } - data, err := GetJson("json/arr-inside-obj-data.json") - if err != nil { - t.Error(err) - } - start := time.Now() - errs := schema.Validate(data) - log.Printf("[TestNestedArrays] Validation Time: %v", time.Since(start)) - if errs != nil { - jsonErrors, err := json.Marshal(errs) - if err != nil { - log.Fatalf("[TestNestedArrays] err: %v", err) - } - log.Println("[TestNestedArrays] json errors:", string(jsonErrors)) - } - - start = time.Now() - newData := schema.Operate(data) - log.Printf("[TestNestedArrays] Operation Time: %v", time.Since(start)) - log.Println("[TestNestedArrays] after operations:", newData) - log.Println("[TestNestedArrays] total time taken:", 
time.Since(fnTimeStart))
-}
-
-func TestDeepValidationInArray(t *testing.T) {
-	fnTimeStart := time.Now()
-	var schema Schematics
-	schema.Logging.PrintErrorLogs = true
-	schema.Logging.PrintDebugLogs = true
-	err := schema.LoadSchemaFromFile("json/arr-inside-obj-schema.json")
-	if err != nil {
-		log.Println("[TestDeepValidationInArray] unable to load the schema from json file: ", err)
-		t.Error(err)
-	}
-	data, err := GetJson("json/arr-inside-arr-obj-data.json")
-	if err != nil {
-		log.Println("[TestDeepValidationInArray] unable to load the data from json file: ", err)
-		t.Error(err)
-	}
-	start := time.Now()
-	errs := schema.Validate(data)
-	log.Printf("[TestDeepValidationInArray] Validation Time: %v", time.Since(start))
-
-	if errs != nil {
-		jsonErrors, err := json.Marshal(errs)
-		if err != nil {
-			log.Fatalf("[TestDeepValidationInArray] err: %v", err)
-		}
-		log.Println("[TestDeepValidationInArray] json errors:", string(jsonErrors))
-	}
-	start = time.Now()
-	newData := schema.Operate(data)
-	log.Printf("[TestDeepValidationInArray] Operation Time: %v", time.Since(start))
-	log.Println("[TestDeepValidationInArray] after operations:", newData)
-	log.Println("[TestDeepValidationInArray] total time taken:", time.Since(fnTimeStart))
-}
-
-func TestSchemaVersioning(t *testing.T) {
-	fnTimeStart := time.Now()
-	var schema Schematics
-	err := schema.LoadSchemaFromFile("json/schema-v1.1.json")
-	if err != nil {
-		log.Println("[TestSchemaVersioning] unable to load the schema from json file: ", err)
-		t.Error(err)
-	}
-	log.Println("Schema Version 1.1", schema)
-
-	log.Println("[TestSchemaVersioning] total time taken:", time.Since(fnTimeStart))
+	errs := schematics.Validate(jsonData)
+	log.Println(errs.GetStrings("en", "%data\n"))
 }
diff --git a/api/parsers/request.go b/api/parsers/request.go
new file mode 100644
index 0000000..9e6c699
--- /dev/null
+++ b/api/parsers/request.go
@@ -0,0 +1,47 @@
+package parsers
+
+import (
+	"encoding/json"
+	"github.com/ashbeelghouri/jsonschematics/utils"
+	"io"
+	"net/http"
+	"strings"
+)
+
+func ParseRequest(r *http.Request) (map[string]interface{}, error) {
+	headers := make(map[string]string)
+	for key, values := range r.Header {
+		headers[key] = values[0]
+	}
+	var body map[string]interface{}
+	if r.Body != nil {
+		bodyBytes, err := io.ReadAll(r.Body)
+		if err != nil {
+			return nil, err
+		}
+		err = json.Unmarshal(bodyBytes, &body)
+		if err != nil {
+			return nil, err
+		}
+	}
+	body = utils.DeflateMap(body, ".")
+	splitPath := strings.Split(r.RequestURI, "?")
+	// get query parameters
+	query := map[string]interface{}{}
+	if len(splitPath) > 1 && splitPath[1] != "" {
+		for _, param := range strings.Split(splitPath[1], "&") {
+			kv := strings.Split(param, "=")
+			if len(kv) == 2 {
+				query[kv[0]] = kv[1]
+			}
+		}
+	}
+	// already in the FLAT mode
+	return map[string]interface{}{
+		"headers": headers,
+		"body":    body,
+		"path":    splitPath[0],
+		"method":  r.Method,
+		"query":   query,
+	}, nil
+}
diff --git a/api/v0/schema.go b/api/v0/schema.go
new file mode 100644
index 0000000..2e1885b
--- /dev/null
+++ b/api/v0/schema.go
@@ -0,0 +1,176 @@
+package v0
+
+import (
+	"github.com/ashbeelghouri/jsonschematics/api/parsers"
+	jsonschematics "github.com/ashbeelghouri/jsonschematics/data/v0"
+	"github.com/ashbeelghouri/jsonschematics/errorHandler"
+	"github.com/ashbeelghouri/jsonschematics/utils"
+	"net/http"
+	"regexp"
+	"strings"
+)
+
+type TargetKey string
+type EndpointKey string
+type Name string
+
+type Field struct {
+	DependsOn  []string
+	Name       string
+	Type       string
+	Required   bool
+	Validators map[TargetKey]Constant
+	Operators  map[TargetKey]Constant
+	L10n       map[string]interface{}
+}
+
+type Constant struct {
+	Attributes map[string]interface{} `json:"attributes"`
+	ErrMsg     string                 `json:"error"`
+	L10n       map[string]interface{} `json:"l10n"`
+}
+
+type Global struct {
+	Headers map[TargetKey]Field
+}
+
+type Endpoint struct {
+	Type    string
+	Body    map[TargetKey]Field
+	Headers map[TargetKey]Field
+	Query   map[TargetKey]Field
+}
+
+type Schema struct {
+	Version   string
+	Global    Global
+	Locale    string
+	Logger    utils.Logger
+	Endpoints map[EndpointKey]Endpoint
+}
+
+func (s *Schema) GetSchematics(fieldType string, fields *map[TargetKey]Field) (*jsonschematics.Schematics, error) {
+	var schematics jsonschematics.Schematics
+	FieldKeys := jsonschematics.Field{
+		DependsOn:  []string{},
+		Type:       fieldType,
+		Validators: map[string]jsonschematics.Constant{},
+		Operators:  map[string]jsonschematics.Constant{},
+	}
+
+	schema := jsonschematics.Schema{
+		Version: s.Version,
+		Fields:  make(map[jsonschematics.TargetKey]jsonschematics.Field),
+	}
+
+	for target, f := range *fields {
+		allValidators := map[string]jsonschematics.Constant{}
+
+		for key, validator := range f.Validators {
+			allValidators[string(key)] = jsonschematics.Constant{
+				Attributes: validator.Attributes,
+				Error:      validator.ErrMsg,
+				L10n:       validator.L10n,
+			}
+		}
+		allOperations := map[string]jsonschematics.Constant{}
+		for key, operator := range f.Operators {
+			allOperations[string(key)] = jsonschematics.Constant{
+				Attributes: operator.Attributes,
+				Error:      operator.ErrMsg,
+				L10n:       operator.L10n,
+			}
+		}
+		FieldKeys.Type = f.Type
+		FieldKeys.Validators = allValidators
+		FieldKeys.Operators = allOperations
+		FieldKeys.L10n = f.L10n
+		schema.Fields[jsonschematics.TargetKey(target)] = FieldKeys
+	}
+
+	schematics.Schema = schema
+	return &schematics, nil
+}
+
+func (s *Schema) ValidateRequest(r *http.Request) *errorHandler.Errors {
+	internalErrors := "internal-errors"
+
+	var errorMessages errorHandler.Errors
+	var errMsg errorHandler.Error
+	errMsg.Validator = "request"
+	errMsg.Value = "all"
+	transformedRequest, err := parsers.ParseRequest(r)
+	if err != nil {
+		s.Logger.ERROR(err.Error())
+		errMsg.AddMessage("en", "unable to transform request")
+		errorMessages.AddError(internalErrors, errMsg)
+		return &errorMessages
+	}
+
+	globalHeadersSchematics, err := s.GetSchematics("Global Headers", &s.Global.Headers)
+	if err != nil {
+		s.Logger.ERROR(err.Error())
+		errMsg.AddMessage("en", "schema conversion error")
+		errorMessages.AddError(internalErrors, errMsg)
+		return &errorMessages
+	}
+	errs := globalHeadersSchematics.Validate(transformedRequest["headers"])
+	if errs.HasErrors() {
+		s.Logger.ERROR("validation errors on global headers:", errs.GetStrings("en", "%validator: %message"))
+		return errs
+	}
+
+	for path, endpoint := range s.Endpoints {
+		regex := utils.GetPathRegex(string(path))
+		matched, err := regexp.MatchString(regex, transformedRequest["path"].(string))
+		if err != nil {
+			errMsg.AddMessage("en", "path not matched - regex not matched")
+			errorMessages.AddError(internalErrors, errMsg)
+			return &errorMessages
+		}
+		if !matched {
+			s.Logger.DEBUG("url not matched")
+			continue
+		}
+
+		if strings.EqualFold(endpoint.Type, transformedRequest["method"].(string)) {
+			headerSchematics, err := s.GetSchematics("Headers", &endpoint.Headers)
+			if err != nil {
+				s.Logger.ERROR(err.Error())
+				errMsg.AddMessage("en", err.Error())
+				errorMessages.AddError(internalErrors, errMsg)
+				return &errorMessages
+			}
+			errs := 
headerSchematics.Validate(transformedRequest["headers"]) + if errs.HasErrors() { + s.Logger.ERROR("validation errors on headers:", errs.GetStrings("en", "%validator: %message")) + return errs + } + bodySchematics, err := s.GetSchematics("Body", &endpoint.Body) + if err != nil { + s.Logger.ERROR(err.Error()) + errMsg.AddMessage("en", err.Error()) + errorMessages.AddError(internalErrors, errMsg) + return &errorMessages + } + errs = bodySchematics.Validate(transformedRequest["body"]) + if errs.HasErrors() { + s.Logger.ERROR("validation errors on body:", errs.GetStrings("en", "%validator: %message")) + return errs + } + querySchematics, err := s.GetSchematics("Query", &endpoint.Query) + if err != nil { + s.Logger.ERROR(err.Error()) + errMsg.AddMessage("en", err.Error()) + errorMessages.AddError(internalErrors, errMsg) + return &errorMessages + } + errs = querySchematics.Validate(transformedRequest["query"]) + if errs.HasErrors() { + s.Logger.ERROR("validation errors on query:", errs.GetStrings("en", "%validator: %message")) + return errs + } + } + } + return nil +} diff --git a/api/v1/schema.go b/api/v1/schema.go new file mode 100644 index 0000000..8dd6a59 --- /dev/null +++ b/api/v1/schema.go @@ -0,0 +1,159 @@ +package v1 + +import ( + "encoding/json" + basic "github.com/ashbeelghouri/jsonschematics/api/v0" + "github.com/ashbeelghouri/jsonschematics/utils" + "log" + "os" +) + +var Logs utils.Logger + +type Schema struct { + Version string `json:"version"` + Global Global `json:"global"` + Endpoints map[string]Endpoint `json:"endpoints"` + Locale string + Logger utils.Logger +} + +type Global struct { + Headers []Field `json:"headers"` +} + +type Endpoint struct { + Path string `json:"path"` + Type string `json:"type"` + Body []Field `json:"body"` + Headers []Field `json:"headers"` + Query []Field `json:"query"` +} + +type Field struct { + DependsOn []string + Key string `json:"target_key"` + Validators map[string]Constant `json:"validators"` + Operators map[string]Constant `json:"operators"` + L10n map[string]interface{} `json:"l10n"` + AdditionalInformation map[string]interface{} `json:"additional_information"` +} + +type Constant struct { + Attributes map[string]interface{} `json:"attributes"` + ErrMsg string `json:"error"` + L10n map[string]interface{} `json:"l10n"` +} + +func (s *Schema) Configs() { + Logs = s.Logger + if s.Logger.PrintDebugLogs { + log.Println("debugger is on") + } + if s.Logger.PrintErrorLogs { + log.Println("error logging is on") + } +} + +func LoadJsonSchemaFile(path string) (*basic.Schema, error) { + var schema Schema + schema.Configs() + content, err := os.ReadFile(path) + if err != nil { + Logs.ERROR("Failed to load schema file", err) + return nil, err + } + err = json.Unmarshal(content, &schema) + if err != nil { + return nil, err + } + return schema.transformTov0(), nil +} + +func LoadMap(schemaMap interface{}) (*basic.Schema, error) { + var s *Schema + s.Configs() + jsonBytes, err := json.Marshal(schemaMap) + if err != nil { + Logs.ERROR("Schema should be valid json map[string]interface", err) + return nil, err + } + err = json.Unmarshal(jsonBytes, &s) + if err != nil { + return nil, err + } + return s.transformTov0(), nil +} + +func transformComponents(components map[string]Constant) map[basic.TargetKey]basic.Constant { + results := map[basic.TargetKey]basic.Constant{} + for key, constant := range components { + results[basic.TargetKey(key)] = basic.Constant{ + Attributes: constant.Attributes, + ErrMsg: constant.ErrMsg, + L10n: constant.L10n, + } + } + return 
results +} + +func (s *Schema) transformTov0() *basic.Schema { + var baseSchema basic.Schema + baseSchema.Version = s.Version + baseSchema.Locale = s.Locale + baseSchema.Logger = s.Logger + global := basic.Global{Headers: map[basic.TargetKey]basic.Field{}} + endpoints := map[basic.EndpointKey]basic.Endpoint{} + + for _, field := range s.Global.Headers { + global.Headers[basic.TargetKey(field.Key)] = basic.Field{ + DependsOn: field.DependsOn, + Validators: transformComponents(field.Validators), + Operators: transformComponents(field.Operators), + L10n: field.L10n, + } + } + + for path, endpoint := range s.Endpoints { + headers := map[basic.TargetKey]basic.Field{} + for _, field := range endpoint.Headers { + headers[basic.TargetKey(field.Key)] = basic.Field{ + DependsOn: field.DependsOn, + Validators: transformComponents(field.Validators), + Operators: transformComponents(field.Operators), + L10n: field.L10n, + } + } + body := map[basic.TargetKey]basic.Field{} + for _, field := range endpoint.Body { + body[basic.TargetKey(field.Key)] = basic.Field{ + DependsOn: field.DependsOn, + Validators: transformComponents(field.Validators), + Operators: transformComponents(field.Operators), + L10n: field.L10n, + } + } + + query := map[basic.TargetKey]basic.Field{} + for _, field := range endpoint.Body { + query[basic.TargetKey(field.Key)] = basic.Field{ + DependsOn: field.DependsOn, + Validators: transformComponents(field.Validators), + Operators: transformComponents(field.Operators), + L10n: field.L10n, + } + } + + endpoints[basic.EndpointKey(path)] = basic.Endpoint{ + Type: endpoint.Type, + Body: body, + Headers: headers, + Query: query, + } + } + + baseSchema.Global = global + baseSchema.Endpoints = endpoints + + return &baseSchema +} diff --git a/api/v2/schema.go b/api/v2/schema.go new file mode 100644 index 0000000..d20b500 --- /dev/null +++ b/api/v2/schema.go @@ -0,0 +1,164 @@ +package v2 + +import ( + "encoding/json" + basic "github.com/ashbeelghouri/jsonschematics/api/v0" + "github.com/ashbeelghouri/jsonschematics/errorHandler" + "github.com/ashbeelghouri/jsonschematics/utils" + "log" + "net/http" + "os" +) + +var Logs utils.Logger + +type Schema struct { + Version string `json:"version"` + Global Global `json:"global"` + Endpoints map[string]Endpoint `json:"endpoints"` + Locale string + Logger utils.Logger +} + +type Global struct { + Headers []Field `json:"headers"` +} + +type Endpoint struct { + Path string `json:"path"` + Type string `json:"type"` + Body []Field `json:"body"` + Headers []Field `json:"headers"` + Query []Field `json:"query"` +} + +type Field struct { + DependsOn []string + Key string `json:"target_key"` + Validators []Component `json:"validators"` + Operators []Component `json:"operators"` + L10n map[string]interface{} `json:"l10n"` + AdditionalInformation map[string]interface{} `json:"additional_information"` +} + +type Component struct { + Name string + Attributes map[string]interface{} + ErrMsg string + L10n map[string]interface{} +} + +func (s *Schema) Configs() { + Logs = s.Logger + if s.Logger.PrintDebugLogs { + log.Println("debugger is on") + } + if s.Logger.PrintErrorLogs { + log.Println("error logging is on") + } +} + +func LoadJsonSchemaFile(path string) (*basic.Schema, error) { + var schema Schema + schema.Configs() + content, err := os.ReadFile(path) + if err != nil { + Logs.ERROR("Failed to load schema file", err) + return nil, err + } + err = json.Unmarshal(content, &schema) + if err != nil { + return nil, err + } + return schema.transformTov0(), nil +} + +func 
LoadMap(schemaMap interface{}) (*basic.Schema, error) { + var s *Schema + s.Configs() + jsonBytes, err := json.Marshal(schemaMap) + if err != nil { + Logs.ERROR("Schema should be valid json map[string]interface", err) + return nil, err + } + err = json.Unmarshal(jsonBytes, &s) + if err != nil { + return nil, err + } + return s.transformTov0(), nil +} + +func transformComponents(components []Component) map[basic.TargetKey]basic.Constant { + validators := map[basic.TargetKey]basic.Constant{} + for _, validator := range components { + validators[basic.TargetKey(validator.Name)] = basic.Constant{ + Attributes: validator.Attributes, + ErrMsg: validator.ErrMsg, + L10n: validator.L10n, + } + } + return validators +} + +func (s *Schema) transformTov0() *basic.Schema { + var baseSchema basic.Schema + baseSchema.Version = s.Version + baseSchema.Locale = s.Locale + baseSchema.Logger = s.Logger + global := basic.Global{Headers: map[basic.TargetKey]basic.Field{}} + endpoints := map[basic.EndpointKey]basic.Endpoint{} + + for _, field := range s.Global.Headers { + global.Headers[basic.TargetKey(field.Key)] = basic.Field{ + DependsOn: field.DependsOn, + Validators: transformComponents(field.Validators), + Operators: transformComponents(field.Operators), + L10n: field.L10n, + } + } + + for path, endpoint := range s.Endpoints { + headers := map[basic.TargetKey]basic.Field{} + for _, field := range endpoint.Headers { + headers[basic.TargetKey(field.Key)] = basic.Field{ + DependsOn: field.DependsOn, + Validators: transformComponents(field.Validators), + Operators: transformComponents(field.Operators), + L10n: field.L10n, + } + } + body := map[basic.TargetKey]basic.Field{} + for _, field := range endpoint.Body { + body[basic.TargetKey(field.Key)] = basic.Field{ + DependsOn: field.DependsOn, + Validators: transformComponents(field.Validators), + Operators: transformComponents(field.Operators), + L10n: field.L10n, + } + } + + query := map[basic.TargetKey]basic.Field{} + for _, field := range endpoint.Body { + query[basic.TargetKey(field.Key)] = basic.Field{ + DependsOn: field.DependsOn, + Validators: transformComponents(field.Validators), + Operators: transformComponents(field.Operators), + L10n: field.L10n, + } + } + endpoints[basic.EndpointKey(path)] = basic.Endpoint{ + Type: endpoint.Type, + Body: body, + Headers: headers, + Query: query, + } + } + baseSchema.Global = global + baseSchema.Endpoints = endpoints + return &baseSchema +} + +func (s *Schema) ValidateRequest(r *http.Request) *errorHandler.Errors { + baseSchema := s.transformTov0() + return baseSchema.ValidateRequest(r) +} diff --git a/data/v0/schema.go b/data/v0/schema.go new file mode 100644 index 0000000..c3b56b1 --- /dev/null +++ b/data/v0/schema.go @@ -0,0 +1,366 @@ +package v0 + +import ( + "encoding/json" + "fmt" + "github.com/ashbeelghouri/jsonschematics/errorHandler" + "github.com/ashbeelghouri/jsonschematics/operators" + "github.com/ashbeelghouri/jsonschematics/utils" + "github.com/ashbeelghouri/jsonschematics/validators" + "log" + "os" + "strings" +) + +var Logs utils.Logger + +type TargetKey string + +type Schematics struct { + Schema Schema + Validators validators.Validators + Operators operators.Operators + Separator string + ArrayIdKey string + Locale string + Logging utils.Logger +} + +type Schema struct { + Version string `json:"version"` + Fields map[TargetKey]Field `json:"fields"` +} + +type Field struct { + DependsOn []string `json:"depends_on"` + DisplayName string `json:"display_name"` + Name string `json:"name"` + Type string 
`json:"type"`
+	IsRequired            bool                   `json:"required"`
+	Description           string                 `json:"description"`
+	Validators            map[string]Constant    `json:"validators"`
+	Operators             map[string]Constant    `json:"operators"`
+	L10n                  map[string]interface{} `json:"l10n"`
+	AdditionalInformation map[string]interface{} `json:"additional_information"`
+}
+
+type Constant struct {
+	Attributes map[string]interface{} `json:"attributes"`
+	Error      string                 `json:"error"`
+	L10n       map[string]interface{} `json:"l10n"`
+}
+
+func (s *Schematics) Configs() {
+	Logs = s.Logging
+	if s.Logging.PrintDebugLogs {
+		log.Println("debugger is on")
+	}
+	if s.Logging.PrintErrorLogs {
+		log.Println("error logging is on")
+	}
+	s.Validators.Logger = Logs
+	s.Operators.Logger = Logs
+}
+
+func (s *Schematics) LoadJsonSchemaFile(path string) error {
+	s.Configs()
+	content, err := os.ReadFile(path)
+	if err != nil {
+		Logs.ERROR("Failed to load schema file", err)
+		return err
+	}
+	var schema Schema
+	err = json.Unmarshal(content, &schema)
+	if err != nil {
+		Logs.ERROR("Failed to unmarshal schema file", err)
+		return err
+	}
+	s.Schema = schema
+	s.Validators.BasicValidators()
+	s.Operators.LoadBasicOperations()
+	if s.Separator == "" {
+		s.Separator = "."
+	}
+	if s.Locale == "" {
+		s.Locale = "en"
+	}
+	return nil
+}
+
+func (s *Schematics) LoadMap(schemaMap interface{}) error {
+	JSON, err := json.Marshal(schemaMap)
+	if err != nil {
+		Logs.ERROR("Schema should be valid json map[string]interface", err)
+		return err
+	}
+	var schema Schema
+	err = json.Unmarshal(JSON, &schema)
+	if err != nil {
+		Logs.ERROR("Invalid Schema", err)
+		return err
+	}
+	s.Schema = schema
+	s.Validators.BasicValidators()
+	s.Operators.LoadBasicOperations()
+	if s.Separator == "" {
+		s.Separator = "."
+	}
+	if s.Locale == "" {
+		s.Locale = "en"
+	}
+	return nil
+}
+
+func (f *Field) Validate(value interface{}, allValidators map[string]validators.Validator, id *string) *errorHandler.Error {
+	var err errorHandler.Error
+	err.Value = value
+	err.ID = id
+	err.Validator = "unknown"
+	for name, constants := range f.Validators {
+		if name != "" {
+			err.Validator = name
+		}
+		if f.IsRequired && value == nil {
+			err.Validator = "Required"
+			err.AddMessage("en", "this is a required field")
+			return &err
+		}
+
+		if utils.StringInStrings(strings.ToUpper(name), utils.ExcludedValidators) {
+			continue
+		}
+
+		var fn validators.Validator
+		fn, exists := allValidators[name]
+		if !exists {
+			Logs.ERROR("validator is not registered:", name)
+			err.AddMessage("en", "validator not registered")
+			return &err
+		}
+		if err1 := fn(value, constants.Attributes); err1 != nil {
+			if !(constants.Error != "" && f.L10n != nil) {
+				err.AddMessage("en", err1.Error())
+				return &err
+			}
+			for locale, msg := range f.L10n {
+				if str, ok := msg.(string); ok {
+					err.AddMessage(locale, str)
+				}
+			}
+			return &err
+		}
+	}
+	return nil
+}
+
+func (s *Schematics) makeFlat(data map[string]interface{}) *map[string]interface{} {
+	var dMap utils.DataMap
+	dMap.FlattenTheMap(data, "", s.Separator)
+	return &dMap.Data
+}
+
+func (s *Schematics) deflate(data map[string]interface{}) map[string]interface{} {
+	return utils.DeflateMap(data, s.Separator)
+}
+
+func (s *Schematics) Validate(jsonData interface{}) *errorHandler.Errors {
+	var baseError errorHandler.Error
+	var errs errorHandler.Errors
+	baseError.Validator = "validate-object"
+	dataBytes, err := json.Marshal(jsonData)
+	if err != nil {
+		baseError.AddMessage("en", "data is not valid json")
+		errs.AddError("whole-data", baseError)
+		return &errs
+	}
+	dataType, item := 
utils.IsValidJson(dataBytes) + if item == nil { + baseError.AddMessage("en", "invalid format provided for the data, can only be map[string]interface or []map[string]interface") + errs.AddError("whole-data", baseError) + return &errs + } + if dataType == "object" { + obj := item.(map[string]interface{}) + return s.ValidateObject(obj, nil) + } else { + arr := item.([]map[string]interface{}) + return s.ValidateArray(arr) + } +} + +func (s *Schematics) ValidateObject(jsonData map[string]interface{}, id *string) *errorHandler.Errors { + log.Println("validating the object") + var errorMessages errorHandler.Errors + var baseError errorHandler.Error + flatData := *s.makeFlat(jsonData) + var missingFromDependants []string + for target, field := range s.Schema.Fields { + baseError.Validator = "is-required" + matchingKeys := utils.FindMatchingKeys(flatData, string(target)) + if len(matchingKeys) == 0 { + if field.IsRequired { + baseError.AddMessage("en", "this field is required") + errorMessages.AddError(string(target), baseError) + } + continue + } + // check for dependencies + if len(field.DependsOn) > 0 { + missing := false + for _, d := range field.DependsOn { + matchDependsOn := utils.FindMatchingKeys(flatData, d) + if !(utils.StringInStrings(string(target), missingFromDependants) == false && len(matchDependsOn) > 0) { + log.Println(matchDependsOn) + baseError.Validator = "depends-on" + baseError.AddMessage("en", "this field depends on other values which do not exists") + errorMessages.AddError(string(target), baseError) + missingFromDependants = append(missingFromDependants, string(target)) + missing = true + break + } + } + if missing { + continue + } + } + + for key, value := range matchingKeys { + validationError := field.Validate(value, s.Validators.ValidationFns, id) + if validationError != nil { + errorMessages.AddError(key, *validationError) + } + } + + } + + if errorMessages.HasErrors() { + return &errorMessages + } + return nil +} + +func (s *Schematics) ValidateArray(jsonData []map[string]interface{}) *errorHandler.Errors { + log.Println("validating the array") + var errs errorHandler.Errors + i := 0 + for _, d := range jsonData { + var errorMessages *errorHandler.Errors + var dMap utils.DataMap + dMap.FlattenTheMap(d, "", s.Separator) + arrayId, exists := dMap.Data[s.ArrayIdKey] + if !exists { + arrayId = fmt.Sprintf("row-%d", i) + exists = true + } + + id := arrayId.(string) + errorMessages = s.ValidateObject(d, &id) + if errorMessages.HasErrors() { + log.Println("has errors", errorMessages.GetStrings("en", "%data\n")) + errs.MergeErrors(errorMessages) + } + i = i + 1 + } + + if errs.HasErrors() { + return &errs + } + return nil +} + +// operators + +func (f *Field) Operate(value interface{}, allOperations map[string]operators.Op) interface{} { + for operationName, operationConstants := range f.Operators { + customValidator, exists := allOperations[operationName] + if !exists { + Logs.ERROR("This operation does not exists in basic or custom operators", operationName) + return nil + } + result := customValidator(value, operationConstants.Attributes) + if result != nil { + value = result + } + } + return value +} + +func (s *Schematics) Operate(data interface{}) (interface{}, *errorHandler.Errors) { + var errorMessages errorHandler.Errors + var baseError errorHandler.Error + baseError.Validator = "operate-on-schema" + bytes, err := json.Marshal(data) + if err != nil { + Logs.ERROR("[operate] error converting the data into bytes", err) + baseError.AddMessage("en", "data is not valid 
json") + errorMessages.AddError("whole-data", baseError) + return nil, &errorMessages + } + + dataType, item := utils.IsValidJson(bytes) + if item == nil { + Logs.ERROR("[operate] error occurred when checking if this data is an array or object") + baseError.AddMessage("en", "can not convert the data into json") + errorMessages.AddError("whole-data", baseError) + return nil, &errorMessages + } + + if dataType == "object" { + obj := item.(map[string]interface{}) + results := s.OperateOnObject(obj) + if results != nil { + return results, nil + } else { + baseError.AddMessage("en", "operation on object unsuccessful") + errorMessages.AddError("whole-data", baseError) + return nil, &errorMessages + } + } else if dataType == "array" { + arr := item.([]map[string]interface{}) + results := s.OperateOnArray(arr) + if results != nil && len(*results) > 0 { + return results, nil + } else { + baseError.AddMessage("en", "operation on array unsuccessful") + errorMessages.AddError("whole-data", baseError) + return nil, &errorMessages + } + } + + return data, nil +} + +func (s *Schematics) OperateOnObject(data map[string]interface{}) *map[string]interface{} { + data = *s.makeFlat(data) + for target, field := range s.Schema.Fields { + matchingKeys := utils.FindMatchingKeys(data, string(target)) + for key, value := range matchingKeys { + data[key] = field.Operate(value, s.Operators.OpFunctions) + } + } + d := s.deflate(data) + return &d +} + +func (s *Schematics) OperateOnArray(data []map[string]interface{}) *[]map[string]interface{} { + var obj []map[string]interface{} + for _, d := range data { + results := s.OperateOnObject(d) + obj = append(obj, *results) + } + if len(obj) > 0 { + return &obj + } + return nil +} + +// General + +func (s *Schematics) MergeFields(sc2 *Schematics) *Schematics { + for target, field := range sc2.Schema.Fields { + if s.Schema.Fields[target].Type == "" { + s.Schema.Fields[target] = field + } + } + return s +} diff --git a/data/v1/schema.go b/data/v1/schema.go new file mode 100644 index 0000000..3dd3aeb --- /dev/null +++ b/data/v1/schema.go @@ -0,0 +1,143 @@ +package v1 + +import ( + "encoding/json" + v0 "github.com/ashbeelghouri/jsonschematics/data/v0" + "github.com/ashbeelghouri/jsonschematics/operators" + "github.com/ashbeelghouri/jsonschematics/utils" + "github.com/ashbeelghouri/jsonschematics/validators" + "log" + "os" +) + +var Logs utils.Logger + +type Schematics struct { + Schema Schema + Validators validators.Validators + Operators operators.Operators + Separator string + ArrayIdKey string + Locale string + Logging utils.Logger +} + +type Schema struct { + Version string `json:"version"` + Fields []Field `json:"fields"` +} + +type Field struct { + DependsOn []string `json:"depends_on"` + DisplayName string `json:"display_name"` + Name string `json:"name"` + TargetKey string `json:"target_key"` + Type string `json:"type"` + IsRequired bool `json:"required"` + Description string `json:"description"` + Validators map[string]Component `json:"validators"` + Operators map[string]Component `json:"operators"` + L10n map[string]interface{} `json:"l10n"` + AdditionalInformation map[string]interface{} `json:"additional_information"` +} + +type Component struct { + Attributes map[string]interface{} `json:"attributes"` + Error string `json:"error"` + L10n map[string]interface{} `json:"l10n"` +} + +func (s *Schematics) Configs() { + Logs = s.Logging + if s.Logging.PrintDebugLogs { + log.Println("debugger is on") + } + if s.Logging.PrintErrorLogs { + log.Println("error logging is on") 
+	}
+	s.Validators.Logger = Logs
+	s.Operators.Logger = Logs
+	s.Validators.BasicValidators()
+	s.Operators.LoadBasicOperations()
+}
+
+func LoadJsonSchemaFile(path string) (*v0.Schematics, error) {
+	var s Schematics
+	s.Configs()
+	content, err := os.ReadFile(path)
+	if err != nil {
+		Logs.ERROR("Failed to load schema file", err)
+		return nil, err
+	}
+	var schema Schema
+	err = json.Unmarshal(content, &schema)
+	if err != nil {
+		Logs.ERROR("Failed to unmarshal schema file", err)
+		return nil, err
+	}
+	s.Schema = schema
+
+	return transformSchematics(s), nil
+}
+
+func LoadMap(schemaMap interface{}) (*v0.Schematics, error) {
+	var s Schematics
+	s.Configs()
+	jsonBytes, err := json.Marshal(schemaMap)
+	if err != nil {
+		Logs.ERROR("Schema should be valid json map[string]interface", err)
+		return nil, err
+	}
+	var schema Schema
+	err = json.Unmarshal(jsonBytes, &schema)
+	if err != nil {
+		Logs.ERROR("Failed to unmarshal schema file", err)
+		return nil, err
+	}
+	s.Schema = schema
+	return transformSchematics(s), nil
+}
+
+func transformSchematics(s Schematics) *v0.Schematics {
+	var baseSchematics v0.Schematics
+	baseSchematics.Locale = s.Locale
+	baseSchematics.Logging = s.Logging
+	baseSchematics.ArrayIdKey = s.ArrayIdKey
+	baseSchematics.Separator = s.Separator
+	baseSchematics.Validators = s.Validators
+	baseSchematics.Operators = s.Operators
+	baseSchematics.Schema = *transformSchema(s.Schema)
+	return &baseSchematics
+}
+
+func transformSchema(schema Schema) *v0.Schema {
+	var baseSchema v0.Schema
+	baseSchema.Version = schema.Version
+	baseSchema.Fields = make(map[v0.TargetKey]v0.Field)
+	for _, field := range schema.Fields {
+		baseSchema.Fields[v0.TargetKey(field.TargetKey)] = v0.Field{
+			DependsOn:             field.DependsOn,
+			Name:                  field.Name,
+			Type:                  field.Type,
+			IsRequired:            field.IsRequired,
+			Description:           field.Description,
+			Validators:            transformComponents(field.Validators),
+			Operators:             transformComponents(field.Operators),
+			L10n:                  field.L10n,
+			AdditionalInformation: field.AdditionalInformation,
+		}
+	}
+	return &baseSchema
+}
+
+func transformComponents(comp map[string]Component) map[string]v0.Constant {
+	con := make(map[string]v0.Constant)
+	for name, c := range comp {
+		con[name] = v0.Constant{
+			Attributes: c.Attributes,
+			Error:      c.Error,
+			L10n:       c.L10n,
+		}
+	}
+	return con
+}
diff --git a/data/v2/schema.go b/data/v2/schema.go
new file mode 100644
index 0000000..027718e
--- /dev/null
+++ b/data/v2/schema.go
@@ -0,0 +1,144 @@
+package v2
+
+import (
+	"encoding/json"
+	v0 "github.com/ashbeelghouri/jsonschematics/data/v0"
+	"github.com/ashbeelghouri/jsonschematics/operators"
+	"github.com/ashbeelghouri/jsonschematics/utils"
+	"github.com/ashbeelghouri/jsonschematics/validators"
+	"log"
+	"os"
+)
+
+var Logs utils.Logger
+
+type Schematics struct {
+	Schema     Schema
+	Validators validators.Validators
+	Operators  operators.Operators
+	Separator  string
+	ArrayIdKey string
+	Locale     string
+	Logging    utils.Logger
+}
+
+type Schema struct {
+	Version string  `json:"version"`
+	Fields  []Field `json:"fields"`
+}
+
+type Field struct {
+	DependsOn             []string               `json:"depends_on"`
+	DisplayName           string                 `json:"display_name"`
+	Name                  string                 `json:"name"`
+	TargetKey             string                 `json:"target_key"`
+	Type                  string                 `json:"type"`
+	IsRequired            bool                   `json:"required"`
+	Description           string                 `json:"description"`
+	Validators            []Component            `json:"validators"`
+	Operators             []Component            `json:"operators"`
+	L10n                  map[string]interface{} `json:"l10n"`
+	AdditionalInformation map[string]interface{} 
`json:"additional_information"` +} + +type Component struct { + Name string `json:"name"` + Attributes map[string]interface{} `json:"attributes"` + Error string `json:"error"` + L10n map[string]interface{} `json:"l10n"` +} + +func (s *Schematics) Configs() { + Logs = s.Logging + if s.Logging.PrintDebugLogs { + log.Println("debugger is on") + } + if s.Logging.PrintErrorLogs { + log.Println("error logging is on") + } + s.Validators.Logger = Logs + s.Operators.Logger = Logs + s.Validators.BasicValidators() + s.Operators.LoadBasicOperations() +} + +func LoadJsonSchemaFile(path string) (*v0.Schematics, error) { + var s Schematics + s.Configs() + content, err := os.ReadFile(path) + if err != nil { + Logs.ERROR("Failed to load schema file", err) + return nil, err + } + var schema Schema + err = json.Unmarshal(content, &schema) + if err != nil { + Logs.ERROR("Failed to unmarshall schema file", err) + return nil, err + } + s.Schema = schema + + return transformSchematics(s), nil +} + +func LoadMap(schemaMap interface{}) (*v0.Schematics, error) { + var s Schematics + s.Configs() + jsonBytes, err := json.Marshal(schemaMap) + if err != nil { + Logs.ERROR("Schema should be valid json map[string]interface", err) + return nil, err + } + var schema Schema + err = json.Unmarshal(jsonBytes, &schema) + if err != nil { + Logs.ERROR("Failed to unmarshall schema file", err) + return nil, err + } + s.Schema = schema + return transformSchematics(s), nil +} + +func transformSchematics(s Schematics) *v0.Schematics { + var baseSchematics v0.Schematics + baseSchematics.Locale = s.Locale + baseSchematics.Logging = s.Logging + baseSchematics.ArrayIdKey = s.ArrayIdKey + baseSchematics.Separator = s.Separator + baseSchematics.Validators = s.Validators + baseSchematics.Operators = s.Operators + baseSchematics.Schema = *transformSchema(s.Schema) + return &baseSchematics +} + +func transformSchema(schema Schema) *v0.Schema { + var baseSchema v0.Schema + baseSchema.Version = schema.Version + baseSchema.Fields = make(map[v0.TargetKey]v0.Field) + for _, field := range schema.Fields { + baseSchema.Fields[v0.TargetKey(field.TargetKey)] = v0.Field{ + DependsOn: field.DependsOn, + Name: field.Name, + Type: field.Name, + IsRequired: field.IsRequired, + Description: field.Description, + Validators: transformComponents(field.Validators), + Operators: transformComponents(field.Operators), + L10n: field.L10n, + AdditionalInformation: field.AdditionalInformation, + } + } + return &baseSchema +} + +func transformComponents(comp []Component) map[string]v0.Constant { + con := make(map[string]v0.Constant) + for _, c := range comp { + con[c.Name] = v0.Constant{ + Attributes: c.Attributes, + Error: c.Error, + L10n: c.L10n, + } + } + return con +} diff --git a/error-messages.go b/error-messages.go deleted file mode 100644 index 5c81dc8..0000000 --- a/error-messages.go +++ /dev/null @@ -1,101 +0,0 @@ -package jsonschematics - -import ( - "errors" - "fmt" - "strings" -) - -// make error format same for the arrays as well as objects - -type ErrorMessage struct { - Message string - Validator string - Target string - Value interface{} - ID interface{} -} - -type ErrorMessages struct { - Messages []ErrorMessage -} - -func (em *ErrorMessages) AddError(validator string, target string, err string, value interface{}) { - logs.DEBUG("adding new error message", err) - em.Messages = append(em.Messages, ErrorMessage{Message: err, Validator: validator, Target: target, Value: value, ID: nil}) -} - -func (em *ErrorMessages) AddErrorsForArray(validator string, 
target string, err string, value interface{}, id interface{}) { - logs.DEBUG("adding error for arrays", err, "on id: ", id) - em.Messages = append(em.Messages, ErrorMessage{Message: err, Validator: validator, Target: target, Value: value, ID: id}) -} - -func (em *ErrorMessages) HaveErrors() bool { - return len(em.Messages) > 0 -} - -func (em *ErrorMessages) ExtractAsStrings(format string) *[]string { - logs.DEBUG("extracting errors as a string") - var errs []string - if !em.HaveErrors() { - return nil - } - if format == "" { - format = "validation error %message for %target with validation on %validator, provided: %value" - } - - for _, msg := range em.Messages { - value := fmt.Sprint(msg.Value) - var id *string - if msg.ID != nil { - msgID := fmt.Sprint(msg.ID) - id = &msgID - } else { - id = nil - } - errs = append(errs, FormatError(id, msg.Message, msg.Target, msg.Validator, value, format)) - } - - return &errs -} - -func (em *ErrorMessages) ExtractAsErrors(format string) []error { - logs.DEBUG("extracting errors as array of errors") - if !em.HaveErrors() { - return nil - } - var errs []error - - messages := em.ExtractAsStrings(format) - for _, msg := range *messages { - errs = append(errs, errors.New(msg)) - } - - return errs -} - -/* - format: "validation error %message for %target with validating with %validation, provided: %value" -*/ - -func (em *ErrorMessages) HaveSingleError(format string, appendWith string) error { - logs.DEBUG("joining all the errors to represent only one error") - - if !em.HaveErrors() { - return nil - } - err := em.ExtractAsStrings(format) - if err != nil && !(len(*err) > 1) { - return errors.New(strings.Join(*err, "")) - } else if err != nil && len(*err) > 1 { - if appendWith == "" { - appendWith = "," - } - return errors.New(strings.Join(*err, appendWith)) - } else if err != nil { - logs.ERROR("[code=1] We are unable to determine the error :::: >>>> ", err) - return errors.New("unable to determine the error") - } - - return nil -} diff --git a/errorHandler/messages.go b/errorHandler/messages.go new file mode 100644 index 0000000..5fd6ca1 --- /dev/null +++ b/errorHandler/messages.go @@ -0,0 +1,151 @@ +package errorHandler + +import ( + "errors" + "fmt" + "github.com/ashbeelghouri/jsonschematics/utils" + "log" + "strings" +) + +type Locale string +type Target string + +type Error struct { + DataTarget string + Message map[Locale]string + Validator string + Value interface{} + ID interface{} + Data map[string]interface{} +} + +type Errors struct { + Messages map[Target]Error +} + +func (e *Error) AddMessage(local string, message string) { + if e.Message == nil { + e.Message = make(map[Locale]string) + } + e.Message[Locale(local)] = message +} +func (e *Error) updateData(target string) Target { + var t string + convertedID, ok := e.ID.(*string) + + if ok && e.ID != nil { + t = fmt.Sprintf("%s:%s", *convertedID, target) + } else { + t = fmt.Sprintf("%s", target) + } + e.Data = make(map[string]interface{}) + e.Data["target"] = t + e.Data["messages"] = e.Message + e.Data["validator"] = e.Validator + e.Data["value"] = e.Value + e.Data["value"] = e.Value + e.Data["id"] = e.ID + return Target(t) +} + +func (em *Errors) AddError(target string, err Error) { + if em.Messages == nil { + em.Messages = make(map[Target]Error) + } + t := err.updateData(target) + em.Messages[t] = err +} + +func (em *Errors) HasErrors() bool { + if em != nil { + for _, err := range em.Messages { + if len(err.Message) > 0 { + return true + } + } + } + return false +} + +func (em *Errors) 
GetStrings(locale Locale, format string) *[]string { + var errs []string + if !em.HasErrors() { + return nil + } + if format == "" { + format = "validation error %message for %target with validation on %validator, provided: %value: {%data}" + } + + for target, msg := range em.Messages { + log.Println(target) + message, ok := msg.Message[locale] + if !ok { + continue + } + value := fmt.Sprint(msg.Value) + var id *string + if msg.ID != nil { + msgID := fmt.Sprint(msg.ID) + id = &msgID + } else { + id = nil + } + errs = append(errs, utils.FormatError(id, message, string(target), msg.Validator, value, format, &msg.Data)) + } + return &errs +} + +func (em *Errors) GetErrors(locale Locale, format string) *[]error { + var errs []error + if !em.HasErrors() { + return nil + } + if format == "" { + format = "validation error %message for %target with validation on %validator, provided: %value" + } + + for target, msg := range em.Messages { + log.Println(target) + message, ok := msg.Message[locale] + if !ok { + continue + } + value := fmt.Sprint(msg.Value) + var id *string + if msg.ID != nil { + msgID := fmt.Sprint(msg.ID) + id = &msgID + } else { + id = nil + } + errs = append(errs, errors.New(utils.FormatError(id, message, string(target), msg.Validator, value, format, &msg.Data))) + } + return &errs +} + +func (em *Errors) GetJoinedError(locale string, singleErrorFormat string, appendWith string) error { + errorStrings := em.GetStrings(Locale(locale), singleErrorFormat) + if errorStrings == nil { + return nil + } + if errorStrings != nil && !(len(*errorStrings) > 1) { + return errors.New(strings.Join(*errorStrings, "")) + } + if appendWith == "" { + appendWith = "," + } + return errors.New(strings.Join(*errorStrings, appendWith)) +} + +func (em *Errors) MergeErrors(em2 *Errors) { + if !em2.HasErrors() { + return + } + if em.Messages == nil { + em.Messages = make(map[Target]Error) + } + for target, err := range em2.Messages { + em.Messages[target] = err + } +} diff --git a/schema.go b/schema.go deleted file mode 100644 index 8944c14..0000000 --- a/schema.go +++ /dev/null @@ -1,408 +0,0 @@ -package jsonschematics - -import ( - "encoding/json" - "errors" - "fmt" - "log" - "os" - - "github.com/ashbeelghouri/jsonschematics/operators" - "github.com/ashbeelghouri/jsonschematics/utils" - "github.com/ashbeelghouri/jsonschematics/validators" -) - -var logs utils.Logger - -type Schematics struct { - Schema Schema - Validators validators.Validators - Operators operators.Operators - Separator string - ArrayIdKey string - Locale string - Logging utils.Logger -} - -type Schema struct { - Version string `json:"version"` - Fields []Field `json:"fields"` -} - -type Field struct { - DependsOn []string `json:"depends_on"` - Name string `json:"name"` - Type string `json:"type"` - TargetKey string `json:"target_key"` - Description string `json:"description"` - Validators map[string]Constant `json:"validators"` - Operators map[string]Constant `json:"operators"` - L10n map[string]interface{} `json:"l10n"` - AdditionalInformation map[string]interface{} `json:"additional_information"` -} - -type Constant struct { - Attributes map[string]interface{} `json:"attributes"` - ErrMsg string `json:"error"` - L10n map[string]interface{} `json:"l10n"` -} - -func (s *Schematics) Configs() { - logs = s.Logging - if s.Logging.PrintDebugLogs { - log.Println("debugger is on") - } - if s.Logging.PrintErrorLogs { - log.Println("error logging is on") - } - s.Validators.Logger = logs - s.Operators.Logger = logs -} - -func (s *Schematics) 
LoadSchemaFromFile(path string) error { - s.Configs() - content, err := os.ReadFile(path) - if err != nil { - logs.ERROR("Failed to load schema file", err) - return err - } - schema, err := HandleSchemaVersions(content) - if err != nil { - return err - } - s.Schema = *schema - s.Validators.BasicValidators() - s.Operators.LoadBasicOperations() - if s.Separator == "" { - s.Separator = "." - } - if s.Locale == "" { - s.Locale = "en" - } - return nil -} - -func (s *Schematics) LoadSchemaFromMap(m *map[string]interface{}) error { - s.Configs() - jsonData, err := json.Marshal(m) - if err != nil { - logs.ERROR("Failed to load schema file", err) - return nil - } - schema, err := HandleSchemaVersions(jsonData) - s.Schema = *schema - if err != nil { - logs.ERROR("Failed to load schema file", err) - return err - } - - s.Validators.BasicValidators() - logs.DEBUG("basic validator loaded") - s.Operators.LoadBasicOperations() - if s.Separator == "" { - logs.DEBUG("separator set to '.'") - s.Separator = "." - } - if s.Locale == "" { - logs.DEBUG("locale set to 'en'") - s.Locale = "en" - } - logs.DEBUG("loaded the file successfully") - return nil -} - -func (f *Field) Validate(value interface{}, allValidators map[string]validators.Validator, locale *string) (*string, error) { - logs.DEBUG("validation is being performed on:", f.Name, fmt.Sprintf("[%s]", f.TargetKey)) - nameOfValidator := "unknown" - for name, constants := range f.Validators { - if name != "" { - nameOfValidator = name - } else { - logs.ERROR("name of the validator is not defined!", name, "constants are:", constants) - } - - logs.DEBUG("name of the validator is:", name) - if stringExists(name, []string{"Exist", "Required", "IsRequired"}) { - logs.DEBUG("skipping required validator as it has already been checked out") - continue - } - if customValidator, exists := allValidators[name]; exists { - logs.DEBUG("validating with", name) - if err := customValidator(value, constants.Attributes); err != nil { - logs.DEBUG("we have an error from our validator", err) - if constants.ErrMsg != "" { - logs.ERROR("Validation Error", err) - var localeError = constants.ErrMsg - if locale != nil && *locale != "" && *locale != "en" { - logs.DEBUG("locale is loaded and is configured") - _, ok := f.L10n[*locale].(string) - if !ok { - localeError = constants.ErrMsg - } else { - localeError = f.L10n[*locale].(string) - } - } - logs.DEBUG("custom error from the schema is being sent") - return &nameOfValidator, errors.New(localeError) - } - logs.DEBUG("sending the error from validation function") - return &nameOfValidator, err - } - } else { - logs.DEBUG("this validator is not registered", &nameOfValidator) - return &nameOfValidator, errors.New("validator not registered") - } - } - return &nameOfValidator, nil -} - -func (f *Field) Operate(value interface{}, allOperations map[string]operators.Op) interface{} { - logs.DEBUG("operation is being performed on:", f.Name, fmt.Sprintf("[%s]", f.TargetKey)) - for operationName, operationConstants := range f.Operators { - logs.DEBUG("performing operation:", operationName) - result := f.PerformOperation(value, operationName, allOperations, operationConstants) - if result != nil { - logs.ERROR("operation successful", result) - value = result - } - } - logs.DEBUG("all operations are performed on", f.TargetKey) - return value -} - -func (f *Field) PerformOperation(value interface{}, operation string, allOperations map[string]operators.Op, constants Constant) interface{} { - customValidator, exists := allOperations[operation] - if 
!exists { - logs.ERROR("This operation does not exists in basic or custom operators", operation) - return nil - } - result := customValidator(value, constants.Attributes) - return *result -} - -func (s *Schematics) makeFlat(data map[string]interface{}) *map[string]interface{} { - var dMap DataMap - dMap.FlattenTheMap(data, "", s.Separator) - return &dMap.Data -} - -func (s *Schematics) deflate(data map[string]interface{}) map[string]interface{} { - return DeflateMap(data, s.Separator) -} - -func (s *Schematics) Validate(data interface{}) *ErrorMessages { - var upperLevelErrors ErrorMessages - - bytes, err := json.Marshal(data) - if err != nil { - logs.ERROR("error converting the data into bytes", err) - upperLevelErrors.AddError("BYTES", "MARSHAL DATA", err.Error(), "validate") - return &upperLevelErrors - } - - dataType, item := canConvert(bytes) - if item == nil { - logs.ERROR("error occurred when checking if this data is an array or object") - errMsg := "unknown error" - upperLevelErrors.AddError("BYTES", "DETERMINE_IS_JSON", errMsg, "validate") - return &upperLevelErrors - } - logs.DEBUG("data type is:", dataType) - if dataType == "object" { - logs.DEBUG("data is an object") - if obj, ok := item.(map[string]interface{}); ok { - return s.validateSingle(obj) - } else { - logs.ERROR("unable to recognize the object for validations") - upperLevelErrors.AddError("BYTES", "IS UNKNOWN TYPE", "unable to recognize the object for validation", "validate") - return &upperLevelErrors - } - - } else if dataType == "array" { - logs.DEBUG("data is an array") - if obj, ok := item.([]map[string]interface{}); ok { - return s.validateArray(obj) - } else { - logs.ERROR("unable to recognize the array for validations") - upperLevelErrors.AddError("BYTES", "IS UNKNOWN TYPE", "unable to recognize the array for validation", "validate") - return &upperLevelErrors - } - } else { - upperLevelErrors.AddError("BYTES", "IS UNKNOWN TYPE", "MUST PROVIDE VALID OBJ map[string]interface{} OR []map[string]interface{}", "validate") - return &upperLevelErrors - } -} - -func (s *Schematics) validateSingle(d map[string]interface{}) *ErrorMessages { - var errs ErrorMessages - var missingFromDependants []string - data := *s.makeFlat(d) - for _, field := range s.Schema.Fields { - allKeys := GetConstantMapKeys(field.Validators) - matchingKeys := FindMatchingKeys(data, field.TargetKey) - logs.DEBUG("matching keys to the target", matchingKeys) - if len(matchingKeys) == 0 { - logs.DEBUG("matching key not found") - if stringsInSlice(allKeys, []string{"MustHave", "Exist", "Required", "IsRequired"}) { - errs.AddError("IsRequired", field.TargetKey, "is required", "") - } - continue - } else if len(field.DependsOn) > 0 { - logs.DEBUG("checking for the pre-requisites") - for _, d := range field.DependsOn { - matchDependsOn := FindMatchingKeys(data, d) - if len(matchDependsOn) < 1 || StringLikePatterns(d, missingFromDependants) { - logs.ERROR("the field on which this field depends on not found", matchDependsOn) - errs.AddError("Depends On", field.TargetKey, "this value depends on other values which do not exists", d) - missingFromDependants = append(missingFromDependants, field.TargetKey) - break - } - } - - } else { - for key, value := range matchingKeys { - validator, err := field.Validate(value, s.Validators.ValidationFns, &s.Locale) - if err != nil { - logs.ERROR("validator error occurred:", err) - var fieldName = key - if s.Locale != "" && s.Locale != "en" { - logs.DEBUG("locale is different than en:", s.Locale) - fieldNameLocales, ok 
:= field.L10n["name"].(map[string]interface{}) - if ok { - _, ok = fieldNameLocales[s.Locale].(string) - if ok { - fieldName = fieldNameLocales[s.Locale].(string) - } - } - } - if validator != nil { - logs.DEBUG("this is a validation error", err, "adding to the errors") - errs.AddError(*validator, fieldName, err.Error(), value) - } - } - } - } - } - if errs.HaveErrors() { - return &errs - } - logs.DEBUG("found no errors") - return nil -} - -func (s *Schematics) validateArray(data []map[string]interface{}) *ErrorMessages { - var errs ErrorMessages - i := 0 - for _, d := range data { - var errorMessages *ErrorMessages - i = i + 1 - var dMap DataMap - dMap.FlattenTheMap(d, "", s.Separator) - arrayId, exists := dMap.Data[s.ArrayIdKey] - if !exists { - logs.DEBUG("array does not have ids defined, so defining them by row number") - arrayId = fmt.Sprintf("row-%d", i) - exists = true - } - logs.DEBUG("arrayID", arrayId) - errorMessages = s.validateSingle(d) - if errorMessages != nil { - for _, msg := range errorMessages.Messages { - errs.AddErrorsForArray(msg.Validator, msg.Target, msg.Message, msg.Value, arrayId) - } - } - } - - if len(errs.Messages) > 0 { - return &errs - } - return nil -} - -func (s *Schematics) Operate(data interface{}) interface{} { - var upperLevelErrors ErrorMessages - bytes, err := json.Marshal(data) - if err != nil { - logs.ERROR("[operate] error converting the data into bytes", err) - upperLevelErrors.AddError("BYTES", "MARSHAL DATA", err.Error(), "operate") - return &upperLevelErrors - } - - dataType, item := canConvert(bytes) - if item == nil { - logs.ERROR("[operate] error occurred when checking if this data is an array or object") - errMsg := "unknown error" - upperLevelErrors.AddError("BYTES", "DETERMINE_IS_JSON", errMsg, "operate") - return &upperLevelErrors - } - - if dataType == "object" { - logs.DEBUG("[operate] data is an object") - if obj, ok := item.(map[string]interface{}); ok { - return s.performOperationSingle(obj) - } else { - logs.ERROR("unable to recognize the object for operations") - upperLevelErrors.AddError("BYTES", "IS UNKNOWN TYPE", "unable to recognize the object for operation", "operate") - return &upperLevelErrors - } - - } else if dataType == "array" { - logs.DEBUG("[operate] data is an array") - if obj, ok := item.([]map[string]interface{}); ok { - return s.performOperationArray(obj) - } else { - logs.ERROR("unable to recognize the array for operations") - upperLevelErrors.AddError("BYTES", "IS UNKNOWN TYPE", "unable to recognize the array for operation", "operate") - return &upperLevelErrors - } - } else { - upperLevelErrors.AddError("BYTES", "IS UNKNOWN TYPE", "MUST PROVIDE VALID OBJ map[string]interface{} OR []map[string]interface{}", "operate") - return &upperLevelErrors - } -} - -func (s *Schematics) performOperationSingle(data map[string]interface{}) *map[string]interface{} { - logs.DEBUG("performing all operations") - data = *s.makeFlat(data) - for _, field := range s.Schema.Fields { - matchingKeys := FindMatchingKeys(data, field.TargetKey) - logs.DEBUG("matching keys for operations", matchingKeys) - for key, value := range matchingKeys { - data[key] = field.Operate(value, s.Operators.OpFunctions) - logs.DEBUG("data after operation", data[key]) - } - } - d := s.deflate(data) - logs.DEBUG("deflated data", d) - return &d -} - -func (s *Schematics) performOperationArray(data []map[string]interface{}) *[]map[string]interface{} { - var obj []map[string]interface{} - for _, d := range data { - results := s.performOperationSingle(d) - obj = 
append(obj, *results) - } - if len(obj) > 0 { - return &obj - } - return nil -} - -func (s *Schematics) MergeFields(sc2 *Schematics, duplicateKeys bool) *Schematics { - if duplicateKeys { - s.Schema.Fields = append(s.Schema.Fields, sc2.Schema.Fields...) - } else { - allKeys := map[string]bool{} - for _, f := range s.Schema.Fields { - allKeys[f.TargetKey] = true - } - for _, field := range sc2.Schema.Fields { - if !allKeys[field.TargetKey] { - s.Schema.Fields = append(s.Schema.Fields, field) - } - } - } - - return s -} diff --git a/test-data/api.json b/test-data/api.json new file mode 100644 index 0000000..e69de29 diff --git a/json/arr-data.json b/test-data/arr-data.json similarity index 100% rename from json/arr-data.json rename to test-data/arr-data.json diff --git a/json/arr-inside-arr-obj-data.json b/test-data/arr-inside-arr-obj-data.json similarity index 100% rename from json/arr-inside-arr-obj-data.json rename to test-data/arr-inside-arr-obj-data.json diff --git a/json/arr-inside-obj-data.json b/test-data/arr-inside-obj-data.json similarity index 100% rename from json/arr-inside-obj-data.json rename to test-data/arr-inside-obj-data.json diff --git a/json/arr-inside-obj-schema.json b/test-data/arr-inside-obj-schema.json similarity index 100% rename from json/arr-inside-obj-schema.json rename to test-data/arr-inside-obj-schema.json diff --git a/json/data.json b/test-data/data.json similarity index 100% rename from json/data.json rename to test-data/data.json diff --git a/test-data/data/direct/v2/example-2.json b/test-data/data/direct/v2/example-2.json new file mode 100644 index 0000000..695aab4 --- /dev/null +++ b/test-data/data/direct/v2/example-2.json @@ -0,0 +1,44 @@ +[ + { + "user": { + "id": 1, + "profile": { + "name": { + "first": "ghouri", + "last": "ghouri michael" + }, + "phone": "03175043399", + "age": 10, + "email": "ashbeelghouri@protonmail.com" + } + } + }, + { + "user": { + "id": 2, + "profile": { + "name": { + "first": "nabeel", + "last": "francis" + }, + "phone": "92039485729384", + "age": 12, + "email": "nabeel@protonmail.com" + } + } + }, + { + "user": { + "id": 3, + "profile": { + "name": { + "first": "dummY", + "last": "user one" + }, + "phone": "889900993847", + "age": 10, + "email": "dummy.one@protonmail.com" + } + } + } +] \ No newline at end of file diff --git a/test-data/data/direct/v2/example.json b/test-data/data/direct/v2/example.json new file mode 100644 index 0000000..a8a8927 --- /dev/null +++ b/test-data/data/direct/v2/example.json @@ -0,0 +1,14 @@ +{ + "user": { + "id": 2, + "profile": { + "name": { + "first": "nabeel", + "last": "francis" + }, + "phone": "92039485729384", + "age": 12, + "email": "nabeel@protonmail.com" + } + } + } \ No newline at end of file diff --git a/json/schema-v1.1.json b/test-data/schema-v1.1.json similarity index 100% rename from json/schema-v1.1.json rename to test-data/schema-v1.1.json diff --git a/json/schema.json b/test-data/schema.json similarity index 100% rename from json/schema.json rename to test-data/schema.json diff --git a/test-data/schema/api/v1/example.json b/test-data/schema/api/v1/example.json new file mode 100644 index 0000000..edd7a33 --- /dev/null +++ b/test-data/schema/api/v1/example.json @@ -0,0 +1,25 @@ +{ + "version": "1.0", + "global": { + "headers": [{ + "target_key": "", + "validators": {}, + "operators": {} + }] + }, + "endpoints": [{ + "path": "", + "method": "get", + "body": [{ + "target_key": "", + "required": false, + "validators": {}, + "operators": {} + }], + "headers": [{ + "target_key": "", + 
"validators": {}, + "operators": {} + }] + }] +} \ No newline at end of file diff --git a/test-data/schema/direct/v0/example-1.json b/test-data/schema/direct/v0/example-1.json new file mode 100644 index 0000000..e69de29 diff --git a/test-data/schema/direct/v2/example-1.json b/test-data/schema/direct/v2/example-1.json new file mode 100644 index 0000000..7648128 --- /dev/null +++ b/test-data/schema/direct/v2/example-1.json @@ -0,0 +1,83 @@ +{ + "version": "1.1", + "fields": [{ + "name": "", + "display_name": "", + "required": false, + "depends_on": [], + "target_key": "user.profile.name.first", + "description": "", + "validators": [{ + "name": "IsString" + }, { + "name": "IsRequired" + }, { + "name": "MaxLengthAllowed", + "attributes": { + "max": 20 + }, + "error": "user's first name should have maximum 20 characters", + "l10n": { + "ar": "سي شسيشسشسيش شسيشس شسيضصثضصث قفلربل ٦ع لبلا ثفق" + } + }], + "operators": [{ + "name": "Capitalize" + }], + "l10n": { + "description": { + "locale": { + "ar": "" + } + } + } + }, { + "depends_on": [], + "required": false, + "target_key": "user.profile.name.last", + "description": "", + "validators": [{ + "name": "IsString" + }, { + "name": "IsRequired" + }, { + "name": "MinLengthAllowed", + "attributes": { + "min": 10 + }, + "error": "user's last name should have minimum 10 characters" + }], + "operators": [{ + "name": "Capitalize" + }] + }, { + "depends_on": [], + "required": false, + "target_key": "user.profile.age", + "description": "", + "validators": [{ + "name": "IsNumber" + }, { + "name": "IsRequired" + }, { + "name": "MaxAllowed", + "attributes": { + "max": 20 + }, + "error": "user's age should not be greater than 20" + }] + }, { + "depends_on": ["user.profile.name.first", "user.profile.name.last"], + "target_key": "user.profile.email", + "description": "", + "validators": [{ + "name": "IsString" + }, { + "name": "IsEmail", + "error": "user's first name should have maximum 20 characters" + }], + "operators": [{ + "name": "Capitalize" + }] + }] +} \ No newline at end of file diff --git a/helpers.go b/utils/helpers.go similarity index 50% rename from helpers.go rename to utils/helpers.go index 154bc9c..6c700f1 100644 --- a/helpers.go +++ b/utils/helpers.go @@ -1,50 +1,23 @@ -package jsonschematics +package utils import ( "encoding/json" "errors" - "fmt" - "os" "reflect" "regexp" "strconv" "strings" ) -type DataMap struct { - Data map[string]interface{} -} - -var basicSchemaVersions = []string{ - "1", - "1.0", -} - -type BaseSchemaInfo struct { - Version string -} - -type Schema1o1 struct { - Version string - Fields []Field1o1 +var ExcludedValidators = []string{ + "REQUIRED", + "IS_REQUIRED", + "IS-REQUIRED", + "ISREQUIRED", } -type Field1o1 struct { - DependsOn []string `json:"depends_on"` - Name string `json:"name"` - Type string `json:"type"` - TargetKey string `json:"target_key"` - Description string `json:"description"` - Validators []ValidOptn1o1 `json:"validators"` - Operators []ValidOptn1o1 `json:"operators"` - L10n map[string]interface{} `json:"l10n"` - AdditionalInformation map[string]interface{} `json:"additional_information"` -} - -type ValidOptn1o1 struct { - Name string `json:"name"` - Attr map[string]interface{} `json:"attributes"` - Err string `json:"error"` +type DataMap struct { + Data map[string]interface{} } func (d *DataMap) FlattenTheMap(data map[string]interface{}, prefix string, separator string) { @@ -90,7 +63,7 @@ func DeflateMap(data map[string]interface{}, separator string) map[string]interf for i := 0; i < len(keys)-1; i++ 
{ key := keys[i] - if nextKeyIsIndex := i < len(keys)-1 && isNumeric(keys[i+1]); nextKeyIsIndex { + if nextKeyIsIndex := i < len(keys)-1 && IsNumeric(keys[i+1]); nextKeyIsIndex { if _, exists := subMap[key]; !exists { subMap[key] = []interface{}{} } @@ -123,46 +96,51 @@ func DeflateMap(data map[string]interface{}, separator string) map[string]interf return result } -func isNumeric(s string) bool { +func IsNumeric(s string) bool { _, err := strconv.Atoi(s) return err == nil } -func stringExists(s string, slice []string) bool { +func StringInStrings(str string, slice []string) bool { for _, item := range slice { - if item == s { + if item == str { return true } } return false } -func stringsInSlice(s []string, slice []string) bool { +func StringsInSlice(s []string, slice []string) bool { for _, str := range s { - if stringExists(str, slice) { + if StringInStrings(str, slice) { return true } } return false } -func isJSON(content []byte) (string, error) { - var result interface{} - if err := json.Unmarshal(content, &result); err != nil { - return "", err - } +func ConvertKeyToRegex(key string) string { + // Escape special regex characters in the key except for * + escapedKey := regexp.QuoteMeta(key) + // Replace * with \d+ to match array indices + regexPattern := strings.ReplaceAll(escapedKey, `\*`, `\d+`) + // Add start and end of line anchors + regexPattern = "^" + regexPattern + "$" + return regexPattern +} - switch result.(type) { - case map[string]interface{}: - return "object", nil - case []interface{}: - return "array", nil - default: - return "unknown", fmt.Errorf("content is neither a JSON object nor array") +func FindMatchingKeys(data map[string]interface{}, keyPattern string) map[string]interface{} { + matchingKeys := make(map[string]interface{}) + re := regexp.MustCompile(ConvertKeyToRegex(keyPattern)) + for key, value := range data { + if re.MatchString(key) { + matchingKeys[key] = value + } } + return matchingKeys } -func canConvert(content []byte) (string, interface{}) { +func IsValidJson(content []byte) (string, interface{}) { var arr []map[string]interface{} var obj map[string]interface{} const IsArray = "array" @@ -175,18 +153,38 @@ func canConvert(content []byte) (string, interface{}) { if err := json.Unmarshal(content, &obj); err == nil { return IsObject, obj } - return "bad-format", nil + return "invalid format", nil } -func GetJson(path string) (interface{}, error) { - content, err := os.ReadFile(path) - if err != nil { - logs.ERROR("Failed to load schema file: %v", err) - return nil, err +func GetPathRegex(path string) string { + path = strings.ReplaceAll(path, "*", ".*") + path = strings.ReplaceAll(path, ":", "[^/]+") + return "^" + path + "$" +} + +func FormatError(id *string, message string, target string, validator string, value string, format string, data *map[string]interface{}) string { + errorMessage := strings.Replace(format, "%message", message, -1) + errorMessage = strings.Replace(errorMessage, "%target", target, -1) + errorMessage = strings.Replace(errorMessage, "%validator", validator, -1) + if id != nil { + errorMessage = strings.Replace(errorMessage, "%id", validator, -1) } - jsonType, err := isJSON(content) - if err != nil { - return nil, err + if data != nil { + marshalled, err := json.Marshal(data) + if err == nil { + d := string(marshalled) + errorMessage = strings.Replace(errorMessage, "%data", d, -1) + } + + } + errorMessage = strings.Replace(errorMessage, "%value", value, -1) + return errorMessage +} + +func BytesToMap(content []byte) (interface{}, 
error) { + jsonType, obj := IsValidJson(content) + if obj == nil { + return nil, errors.New("invalid json file content found") } switch jsonType { case "object": @@ -204,14 +202,12 @@ func GetJson(path string) (interface{}, error) { default: return nil, errors.New("unknown json file content found") } - } func getJsonFileAsMap(content []byte) (map[string]interface{}, error) { var data map[string]interface{} err := json.Unmarshal(content, &data) if err != nil { - logs.ERROR("[GetJsonFileAsMap] Failed to parse the data", err) return nil, err } return data, nil @@ -221,125 +217,7 @@ func getJsonFileAsMapArray(content []byte) ([]map[string]interface{}, error) { var data []map[string]interface{} err := json.Unmarshal(content, &data) if err != nil { - logs.ERROR("[GetJsonFileAsMapArray] Failed to parse the data: %v", err) return nil, err } return data, nil } - -func ConvertKeyToRegex(key string) string { - // Escape special regex characters in the key except for * - escapedKey := regexp.QuoteMeta(key) - // Replace * with \d+ to match array indices - regexPattern := strings.ReplaceAll(escapedKey, `\*`, `\d+`) - // Add start and end of line anchors - regexPattern = "^" + regexPattern + "$" - return regexPattern -} - -func FindMatchingKeys(data map[string]interface{}, keyPattern string) map[string]interface{} { - matchingKeys := make(map[string]interface{}) - re := regexp.MustCompile(ConvertKeyToRegex(keyPattern)) - for key, value := range data { - if re.MatchString(key) { - matchingKeys[key] = value - } - } - return matchingKeys -} - -func StringLikePatterns(str string, keyPatterns []string) bool { - for _, pattern := range keyPatterns { - re := regexp.MustCompile(ConvertKeyToRegex(pattern)) - if re.MatchString(str) { - return true - } - } - return false -} - -func GetConstantMapKeys(mapper map[string]Constant) []string { - keys := make([]string, 0, len(mapper)) - for k := range mapper { - keys = append(keys, k) - } - return keys -} - -func FormatError(id *string, message string, target string, validator string, value string, format string) string { - errorMessage := strings.Replace(format, "%message", message, -1) - errorMessage = strings.Replace(errorMessage, "%target", target, -1) - errorMessage = strings.Replace(errorMessage, "%validator", validator, -1) - if id != nil { - value = fmt.Sprintf("[%s]:%s", *id, value) - } - errorMessage = strings.Replace(errorMessage, "%value", value, -1) - return errorMessage -} - -func HandleSchemaVersions(schemaBytes []byte) (*Schema, error) { - var schemaMap BaseSchemaInfo - err := json.Unmarshal(schemaBytes, &schemaMap) - if err != nil { - return nil, err - } - if stringExists(schemaMap.Version, basicSchemaVersions) { - var schema Schema - err = json.Unmarshal(schemaBytes, &schema) - if err != nil { - return nil, err - } - return &schema, nil - } - switch schemaMap.Version { - case "1.1": - schema, err := translateSchema1o1(schemaBytes) - if err != nil { - return nil, err - } - return schema, nil - } - - return nil, errors.New("unable to handle the schema") -} - -func translateSchema1o1(schemaMap []byte) (*Schema, error) { - var schema1o1 Schema1o1 - err := json.Unmarshal(schemaMap, &schema1o1) - if err != nil { - return nil, err - } - - var baseSchema Schema - - var fields []Field - - for _, f := range schema1o1.Fields { - fd := Field{ - DependsOn: f.DependsOn, - TargetKey: f.TargetKey, - Description: f.Description, - Validators: make(map[string]Constant), - Operators: make(map[string]Constant), - Name: f.Name, - AdditionalInformation: 
-			L10n:                  f.L10n,
-			Type:                  f.Type,
-		}
-		for _, validator := range f.Validators {
-			fd.Validators[validator.Name] = Constant{
-				Attributes: validator.Attr,
-				ErrMsg:     validator.Err,
-			}
-		}
-		for _, operator := range f.Operators {
-			fd.Operators[operator.Name] = Constant{
-				Attributes: operator.Attr,
-				ErrMsg:     operator.Err,
-			}
-		}
-		fields = append(fields, fd)
-	}
-	baseSchema.Fields = fields
-	return &baseSchema, nil
-}
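
Usage note (not part of the patch): a minimal sketch of the key-pattern helpers that this patch exports from the utils package. The module import path is taken from this repository; the flattened keys and values below are illustrative only.

package main

import (
	"fmt"

	"github.com/ashbeelghouri/jsonschematics/utils"
)

func main() {
	// Keys in the shape DataMap.FlattenTheMap produces with "." as the
	// separator; numeric segments stand for array indices (values are made up).
	flat := map[string]interface{}{
		"user.roles.0.name": "admin",
		"user.roles.1.name": "editor",
		"user.profile.age":  12,
	}

	// ConvertKeyToRegex escapes the key and turns "*" into \d+, so one
	// target_key can address every index of an array in the flattened map.
	fmt.Println(utils.ConvertKeyToRegex("user.roles.*.name")) // ^user\.roles\.\d+\.name$

	matches := utils.FindMatchingKeys(flat, "user.roles.*.name")
	fmt.Println(matches) // map[user.roles.0.name:admin user.roles.1.name:editor]
}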
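Usage note (not part of the patch): the new FormatError signature takes an optional id and the raw data map, and substitutes the %message, %target, %validator, %id, %value and %data placeholders in a caller-supplied format string. A sketch assuming the corrected %id substitution above; the format string and values are illustrative.

package main

import (
	"fmt"

	"github.com/ashbeelghouri/jsonschematics/utils"
)

func main() {
	id := "row-3"
	data := map[string]interface{}{"user.profile.age": 42}

	// Placeholders that do not appear in the format string are simply ignored;
	// %data is omitted here, so the marshalled map is not printed.
	format := "[%id] %target failed %validator: %message (got %value)"
	msg := utils.FormatError(&id, "age should not be greater than 20",
		"user.profile.age", "MaxAllowed", "42", format, &data)
	fmt.Println(msg)
	// [row-3] user.profile.age failed MaxAllowed: age should not be greater than 20 (got 42)
}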
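Usage note (not part of the patch): IsValidJson reports whether raw bytes decode as a JSON object or an array and hands back the decoded value, which is what BytesToMap dispatches on. A sketch assuming the fixture added in this patch is readable from the working directory.

package main

import (
	"fmt"
	"os"

	"github.com/ashbeelghouri/jsonschematics/utils"
)

func main() {
	content, err := os.ReadFile("test-data/data/direct/v2/example-2.json")
	if err != nil {
		panic(err)
	}

	// kind is "object", "array" or "invalid format"; the second return value is
	// the decoded map[string]interface{} or []map[string]interface{} (nil on failure).
	kind, decoded := utils.IsValidJson(content)
	switch kind {
	case "array":
		fmt.Printf("array with %d elements\n", len(decoded.([]map[string]interface{})))
	case "object":
		fmt.Println("single object:", decoded.(map[string]interface{}))
	default:
		fmt.Println("not valid JSON")
	}
}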