diff --git a/go.work.sum b/go.work.sum index 295469aad7..476818aea8 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1,7 +1,15 @@ cloud.google.com/go v0.110.10/go.mod h1:v1OoFqYxiBkUrruItNM3eT4lLByNjxmJSV/xDKJNnic= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= +github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/alecthomas/chroma/v2 v2.5.0/go.mod h1:yrkMI9807G1ROx13fhe1v6PN2DDeaR73L3d+1nmYQtw= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= @@ -9,12 +17,15 @@ github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkX github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d/go.mod h1:tmAIfUFEirG/Y8jhZ9M+h36obRZAk/1fcSpXwAVlfqE= github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= -github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg= +github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/fxamacker/cbor/v2 v2.4.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= @@ -24,34 +35,38 @@ github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjh github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-rootcerts 
v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/lestrrat-go/backoff/v2 v2.0.8/go.mod h1:rHP/q/r9aT27n24JQLa7JhSQZCKBBOiM/uP402WwN8Y= github.com/lestrrat-go/blackmagic v1.0.0/go.mod h1:TNgH//0vYSs8VXDCfkZLgIrVTTXQELZffUV0tz3MtdQ= github.com/lestrrat-go/iter v1.0.1/go.mod h1:zIdgO1mRKhn8l9vrZJZz9TUMMFbQbLeTsbqPDrJ/OJc= github.com/lestrrat-go/option v1.0.0/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= -github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= -github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/urfave/cli v1.22.12/go.mod h1:sSBEIC79qR6OvcmsD4U3KABeOTxDqQtdDnaFuUN30b8= github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/goldmark v1.5.4/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= 
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= diff --git a/integration/config.go b/integration/config.go new file mode 100644 index 0000000000..70affbd8d0 --- /dev/null +++ b/integration/config.go @@ -0,0 +1,15 @@ +package integration + +import "github.com/opentdf/opentdf-v2-poc/internal/config" + +var Config *config.Config + +func init() { + Config = &config.Config{} + + Config.DB.User = "postgres" + Config.DB.Password = "postgres" + Config.DB.Host = "localhost" + Config.DB.Port = 5432 + Config.DB.Database = "opentdf-test" +} diff --git a/integration/db.go b/integration/db.go new file mode 100644 index 0000000000..a7631589a9 --- /dev/null +++ b/integration/db.go @@ -0,0 +1,81 @@ +package integration + +import ( + "context" + "log/slog" + "strings" + + "github.com/opentdf/opentdf-v2-poc/internal/db" +) + +type DBInterface struct { + Client *db.Client + schema string +} + +func NewDBInterface(schema string) DBInterface { + config := Config.DB + config.Schema = schema + c, err := db.NewClient(config) + if err != nil { + slog.Error("issue creating database client", slog.String("error", err.Error())) + panic(err) + } + return DBInterface{ + Client: c, + schema: schema, + } +} + +func (d *DBInterface) StringArrayWrap(values []string) string { + // if len(values) == 0 { + // return "null" + // } + var vs []string + for _, v := range values { + vs = append(vs, d.StringWrap(v)) + } + return "ARRAY [" + strings.Join(vs, ",") + "]" +} + +func (d *DBInterface) UUIDArrayWrap(v []string) string { + return "(" + d.StringArrayWrap(v) + ")" + "::uuid[]" +} + +func (d *DBInterface) StringWrap(v string) string { + return "'" + v + "'" +} + +func (d *DBInterface) UUIDWrap(v string) string { + return "(" + d.StringWrap(v) + ")" + "::uuid" +} + +func (d *DBInterface) TableName(v string) string { + return d.schema + "." 
+ v +} + +func (d *DBInterface) ExecInsert(table string, columns []string, values ...[]string) (int64, error) { + sql := "INSERT INTO " + d.TableName(table) + + " (" + strings.Join(columns, ",") + ")" + + " VALUES " + for i, v := range values { + if i > 0 { + sql += "," + } + sql += " (" + strings.Join(v, ",") + ")" + } + pconn, err := d.Client.Exec(context.Background(), sql) + if err != nil { + return 0, err + } + return pconn.RowsAffected(), err +} + +func (d *DBInterface) DropSchema() error { + sql := "DROP SCHEMA IF EXISTS " + d.schema + " CASCADE" + _, err := d.Client.Exec(context.Background(), sql) + if err != nil { + return err + } + return nil +} diff --git a/integration/fixtures.go b/integration/fixtures.go new file mode 100644 index 0000000000..4f496d212e --- /dev/null +++ b/integration/fixtures.go @@ -0,0 +1,217 @@ +package integration + +import ( + "log/slog" + "os" + + "gopkg.in/yaml.v2" +) + +var fixtureFilename = "fixtures.yaml" +var fixtureData FixtureData + +type FixtureMetadata struct { + TableName string `yaml:"table_name"` + Columns []string `yaml:"columns"` +} + +type FixtureDataNamespace struct { + Id string `yaml:"id"` + Name string `yaml:"name"` +} + +type FixtureDataAttribute struct { + Id string `yaml:"id"` + NamespaceId string `yaml:"namespace_id"` + Name string `yaml:"name"` + Rule string `yaml:"rule"` +} + +type FixtureDataAttributeValue struct { + Id string `yaml:"id"` + AttributeDefinitionId string `yaml:"attribute_definition_id"` + Value string `yaml:"value"` + Members []string `yaml:"members"` +} + +type FixtureDataSubjectMapping struct { + Id string `yaml:"id"` + AttributeValueId string `yaml:"attribute_value_id"` + Operator string `yaml:"operator"` + SubjectAttribute string `yaml:"subject_attribute"` + SubjectAttributeValues []string `yaml:"subject_attribute_values"` +} + +type FixtureData struct { + Namespaces struct { + Metadata FixtureMetadata `yaml:"metadata"` + Data map[string]FixtureDataNamespace `yaml:"data"` + } `yaml:"namespaces"` + Attributes struct { + Metadata FixtureMetadata `yaml:"metadata"` + Data map[string]FixtureDataAttribute `yaml:"data"` + } `yaml:"attributes"` + AttributeValues struct { + Metadata FixtureMetadata `yaml:"metadata"` + Data map[string]FixtureDataAttributeValue `yaml:"data"` + } `yaml:"attribute_values"` + SubjectMappings struct { + Metadata FixtureMetadata `yaml:"metadata"` + Data map[string]FixtureDataSubjectMapping `yaml:"data"` + } `yaml:"subject_mappings"` +} + +func loadFixtureData() { + c, err := os.ReadFile(fixtureFilename) + if err != nil { + slog.Error("could not read "+fixtureFilename, slog.String("error", err.Error())) + panic(err) + } + + if err := yaml.Unmarshal(c, &fixtureData); err != nil { + slog.Error("could not unmarshal "+fixtureFilename, slog.String("error", err.Error())) + panic(err) + } +} + +type Fixtures struct { + db DBInterface +} + +func NewFixture(db DBInterface) Fixtures { + return Fixtures{ + db: db, + } +} + +func (f *Fixtures) GetNamespaceKey(key string) FixtureDataNamespace { + if fixtureData.Namespaces.Data[key].Id == "" { + slog.Error("could not find namespace", slog.String("id", key)) + panic("could not find namespace") + } + return fixtureData.Namespaces.Data[key] +} + +func (f *Fixtures) GetAttributeKey(key string) FixtureDataAttribute { + if fixtureData.Attributes.Data[key].Id == "" { + slog.Error("could not find attributes", slog.String("id", key)) + panic("could not find attributes") + } + return fixtureData.Attributes.Data[key] +} + +func (f *Fixtures) GetAttributeValueKey(key 
string) FixtureDataAttributeValue { + if fixtureData.AttributeValues.Data[key].Id == "" { + slog.Error("could not find attribute-values", slog.String("id", key)) + panic("could not find attribute-values") + } + return fixtureData.AttributeValues.Data[key] +} + +func (f *Fixtures) GetSubjectMappingKey(key string) FixtureDataSubjectMapping { + if fixtureData.SubjectMappings.Data[key].Id == "" { + slog.Error("could not find subject-mappings", slog.String("id", key)) + panic("could not find subject-mappings") + } + return fixtureData.SubjectMappings.Data[key] +} + +func (f *Fixtures) Provision() { + slog.Info("📦 running migrations in schema", slog.String("schema", f.db.schema)) + f.db.Client.RunMigrations() + + slog.Info("📦 provisioning namespace data") + n := f.provisionNamespace() + slog.Info("📦 provisioning attribute data") + a := f.provisionAttribute() + slog.Info("📦 provisioning attribute value data") + aV := f.provisionAttributeValues() + slog.Info("📦 provisioning subject mapping data") + sM := f.provisionSubjectMappings() + + slog.Info("📦 provisioned fixtures data", + slog.Int64("namespaces", n), + slog.Int64("attributes", a), + slog.Int64("attribute_values", aV), + slog.Int64("subject_mappings", sM), + ) +} + +func (f *Fixtures) TearDown() { + slog.Info("🗑 dropping schema", slog.String("schema", f.db.schema)) + if err := f.db.DropSchema(); err != nil { + slog.Error("could not truncate tables", slog.String("error", err.Error())) + panic(err) + } +} + +func (f *Fixtures) provisionNamespace() int64 { + var values [][]string + for _, d := range fixtureData.Namespaces.Data { + values = append(values, + []string{ + f.db.StringWrap(d.Id), + f.db.StringWrap(d.Name), + }, + ) + } + return f.provision(fixtureData.Namespaces.Metadata.TableName, fixtureData.Namespaces.Metadata.Columns, values) +} + +func (f *Fixtures) provisionAttribute() int64 { + var values [][]string + for _, d := range fixtureData.Attributes.Data { + values = append(values, []string{ + f.db.StringWrap(d.Id), + f.db.StringWrap(d.NamespaceId), + f.db.StringWrap(d.Name), + f.db.StringWrap(d.Rule), + }) + } + return f.provision(fixtureData.Attributes.Metadata.TableName, fixtureData.Attributes.Metadata.Columns, values) +} + +func (f *Fixtures) provisionAttributeValues() int64 { + var values [][]string + for _, d := range fixtureData.AttributeValues.Data { + values = append(values, []string{ + f.db.StringWrap(d.Id), + f.db.StringWrap(d.AttributeDefinitionId), + f.db.StringWrap(d.Value), + f.db.UUIDArrayWrap(d.Members), + }) + } + return f.provision(fixtureData.AttributeValues.Metadata.TableName, fixtureData.AttributeValues.Metadata.Columns, values) +} + +func (f *Fixtures) provisionSubjectMappings() int64 { + var values [][]string + for _, d := range fixtureData.SubjectMappings.Data { + values = append(values, []string{ + f.db.StringWrap(d.Id), + f.db.UUIDWrap(d.AttributeValueId), + f.db.StringWrap(d.Operator), + f.db.StringWrap(d.SubjectAttribute), + f.db.StringArrayWrap(d.SubjectAttributeValues), + }) + } + return f.provision(fixtureData.SubjectMappings.Metadata.TableName, fixtureData.SubjectMappings.Metadata.Columns, values) +} + +func (f *Fixtures) provision(t string, c []string, v [][]string) (rows int64) { + var err error + rows, err = f.db.ExecInsert(t, c, v...) 
+ if err != nil { + slog.Error("⛔️ 📦 issue with insert into table - check fixtures.yaml for issues", slog.String("table", t)) + panic("issue with insert into table") + } + if rows == 0 { + slog.Error("⛔️ 📦 no rows provisioned - check fixtures.yaml for issues", slog.String("table", t), slog.Int("expected", len(v))) + panic("no rows provisioned") + } + if rows != int64(len(v)) { + slog.Error("⛔️ 📦 incorrect number of rows provisioned - check fixtures.yaml for issues", slog.String("table", t), slog.Int("expected", len(v)), slog.Int64("actual", rows)) + panic("incorrect number of rows provisioned") + } + return rows +} diff --git a/integration/fixtures.yaml b/integration/fixtures.yaml new file mode 100644 index 0000000000..07df834126 --- /dev/null +++ b/integration/fixtures.yaml @@ -0,0 +1,154 @@ +## +# Namespaces +## +namespaces: + metadata: + table_name: namespaces + columns: + - id + - name + data: + example.com: + id: 00000000-0000-0000-0000-000000000000 + name: example.com + example.net: + id: 00000000-0000-0000-0000-000000000001 + name: example.net + example.org: + id: 00000000-0000-0000-0000-000000000002 + name: example.org + +## +# Attributes +# +# Attribute Rule Enum: UNSPECIFIED, ANY_OF, ALL_OF, HIERARCHY +## +attributes: + metadata: + table_name: attribute_definitions + columns: + - id + - namespace_id + - name + - rule + data: + example.com/attr/attr1: + id: 00000000-0000-0000-0000-000000000000 + namespace_id: 00000000-0000-0000-0000-000000000000 + name: attr1 + rule: ANY_OF + example.com/attr/attr2: + id: 00000000-0000-0000-0000-000000000001 + namespace_id: 00000000-0000-0000-0000-000000000000 + name: attr2 + rule: ALL_OF + + example.net/attr/attr1: + id: 00000000-0000-0000-0000-000000000002 + namespace_id: 00000000-0000-0000-0000-000000000001 + name: attr1 + rule: ANY_OF + example.net/attr/attr2: + id: 00000000-0000-0000-0000-000000000003 + namespace_id: 00000000-0000-0000-0000-000000000001 + name: attr2 + rule: ALL_OF + example.net/attr/attr3: + id: 00000000-0000-0000-0000-000000000004 + namespace_id: 00000000-0000-0000-0000-000000000001 + name: attr3 + rule: HIERARCHY + + example.org/attr/attr1: + id: 00000000-0000-0000-0000-000000000005 + namespace_id: 00000000-0000-0000-0000-000000000002 + name: attr1 + rule: ANY_OF + example.org/attr/attr2: + id: 00000000-0000-0000-0000-000000000006 + namespace_id: 00000000-0000-0000-0000-000000000002 + name: attr2 + rule: ALL_OF + example.org/attr/attr3: + id: 00000000-0000-0000-0000-000000000007 + namespace_id: 00000000-0000-0000-0000-000000000002 + name: attr3 + rule: HIERARCHY + +## +# Attribute Values +## +attribute_values: + metadata: + table_name: attribute_values + columns: + - id + - attribute_definition_id + - value + - members + data: + example.com/attr/attr1/value/value1: + id: 00000000-0000-0000-0000-000000000000 + attribute_definition_id: 00000000-0000-0000-0000-000000000000 + value: value1 + example.com_attr1/value/value2: + id: 00000000-0000-0000-0000-000000000001 + attribute_definition_id: 00000000-0000-0000-0000-000000000000 + value: value2 + members: + # example.com/attr/attr2/value/value1 + - 00000000-0000-0000-0000-000000000002 + # example.net/attr/attr1/value/value1 + - 00000000-0000-0000-0000-000000000004 + + example.com/attr/attr2/value/value1: + id: 00000000-0000-0000-0000-000000000002 + attribute_definition_id: 00000000-0000-0000-0000-000000000001 + value: value1 + example.com/attr/attr2/value/value2: + id: 00000000-0000-0000-0000-000000000003 + attribute_definition_id: 00000000-0000-0000-0000-000000000001 + value: 
value2 + + example.net/attr/attr1/value/value1: + id: 00000000-0000-0000-0000-000000000004 + attribute_definition_id: 00000000-0000-0000-0000-000000000002 + value: value1 + example.net/attr/attr1/value/value2: + id: 00000000-0000-0000-0000-000000000005 + attribute_definition_id: 00000000-0000-0000-0000-000000000002 + value: value2 + +## +# Subject Mappings +# +# Operator Enum: UNSPECIFIED, IN, NOT_IN +## +subject_mappings: + metadata: + table_name: subject_mappings + columns: + - id + - attribute_value_id + - operator + - subject_attribute + - subject_attribute_values + data: + subject_mapping_subject_attribute1: + id: 00000000-0000-0000-0000-000000000000 + attribute_value_id: 00000000-0000-0000-0000-000000000000 + operator: IN + subject_attribute: subject_attribute1 + subject_attribute_values: + - value1 + - value2 + + subject_mapping_subject_attribute2: + id: 00000000-0000-0000-0000-000000000001 + attribute_value_id: 00000000-0000-0000-0000-000000000001 + operator: NOT_IN + subject_attribute: subject_attribute2 + subject_attribute_values: + - value1 + - value2 + - value3 \ No newline at end of file diff --git a/integration/main_test.go b/integration/main_test.go new file mode 100644 index 0000000000..2aa98430e5 --- /dev/null +++ b/integration/main_test.go @@ -0,0 +1,159 @@ +package integration + +import ( + "context" + "fmt" + "log/slog" + "os" + "testing" + "time" + + "github.com/creasty/defaults" + tc "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +var fixtures Fixtures + +func init() { + fmt.Println("====================================================================================") + fmt.Println("") + fmt.Println(" Integration Tests") + fmt.Println("") + fmt.Println(" Testcontainers is used to run these integration tests. 
To get this working please") + fmt.Println(" ensure you have Docker/Podman installed and running.") + fmt.Println("") + fmt.Println(" If using Podman, export these variables:") + fmt.Println(" export TESTCONTAINERS_PODMAN=true;") + fmt.Println(" export TESTCONTAINERS_RYUK_CONTAINER_PRIVILEGED=true;") + fmt.Println(" export TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE=/var/run/docker.sock;") + fmt.Println("") + fmt.Println(" For more information please see: https://www.testcontainers.org/") + fmt.Println("") + fmt.Println(" ---------------------------------------------------------------------------------") + fmt.Println("") + fmt.Println(" Test runner hanging at '📀 starting postgres container'?") + fmt.Println(" Try restarting Docker/Podman and running the tests again.") + fmt.Println("") + fmt.Println(" Docker: docker-machine restart") + fmt.Println(" Podman: podman machine stop;podman machine start") + fmt.Println("") + fmt.Println("====================================================================================") + fmt.Println("") +} + +func TestMain(m *testing.M) { + ctx := context.Background() + conf := Config + + if err := defaults.Set(conf); err != nil { + slog.Error("could not set defaults", slog.String("error", err.Error())) + os.Exit(1) + } + + /* + For podman + export TESTCONTAINERS_RYUK_CONTAINER_PRIVILEGED=true; # needed to run Reaper (alternative disable it TESTCONTAINERS_RYUK_DISABLED=true) + export TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE=/var/run/docker.sock; # needed to apply the bind with statfs + */ + + var providerType tc.ProviderType + + if os.Getenv("TESTCONTAINERS_PODMAN") == "true" { + providerType = tc.ProviderPodman + } else { + providerType = tc.ProviderDocker + } + + req := tc.GenericContainerRequest{ + ProviderType: providerType, + ContainerRequest: tc.ContainerRequest{ + Image: "postgres:13.3", + Name: "testcontainer-postgres", + ExposedPorts: []string{"5432/tcp"}, + + Env: map[string]string{ + "POSTGRES_USER": conf.DB.User, + "POSTGRES_PASSWORD": conf.DB.Password, + "POSTGRES_DB": conf.DB.Database, + }, + + WaitingFor: wait.ForExec([]string{"pg_isready", "-h", "localhost", "-U", conf.DB.User}).WithStartupTimeout(120 * time.Second), + }, + Started: true, + } + + slog.Info("📀 starting postgres container") + postgres, err := tc.GenericContainer(context.Background(), req) + if err != nil { + slog.Error("could not start postgres container", slog.String("error", err.Error())) + panic(err) + } + + // Cleanup the container + defer func() { + if err := postgres.Terminate(ctx); err != nil { + slog.Error("could not stop postgres container", slog.String("error", err.Error())) + return + } + + if err := recover(); err != nil { + os.Exit(1) + } + }() + + port, err := postgres.MappedPort(ctx, "5432/tcp") + if err != nil { + slog.Error("could not get postgres mapped port", slog.String("error", err.Error())) + panic(err) + } + + conf.DB.Port = port.Int() + + db := NewDBInterface("test_opentdf") + if err != nil { + slog.Error("issue creating database client", slog.String("error", err.Error())) + panic(err) + } + + slog.Info("🚚 applying migrations") + applied, err := db.Client.RunMigrations() + if err != nil { + slog.Error("issue running migrations", slog.String("error", err.Error())) + panic(err) + } + slog.Info("🚚 applied migrations", slog.Int("count", applied)) + + slog.Info("🏠 loading fixtures") + loadFixtureData() + + // otdf, err := server.NewOpenTDFServer(conf.Server) + // if err != nil { + // slog.Error("issue creating opentdf server", slog.String("error", err.Error())) + // 
panic(err) + // } + // defer otdf.Stop() + + // slog.Info("starting opa engine") + // // Start the opa engine + // conf.OPA.Embedded = true + // eng, err := opa.NewEngine(conf.OPA) + // if err != nil { + // slog.Error("could not start opa engine", slog.String("error", err.Error())) + // panic(err) + // } + // defer eng.Stop(context.Background()) + + // // Register the services + // err = cmd.RegisterServices(*conf, otdf, dbClient, eng) + // if err != nil { + // slog.Error("issue registering services", slog.String("error", err.Error())) + // panic(err) + // } + + // // Start the server + // slog.Info("starting opentdf server", slog.Int("grpcPort", conf.Server.Grpc.Port), slog.Int("httpPort", conf.Server.HTTP.Port)) + // otdf.Run() + + m.Run() +} diff --git a/integration/subject_mappings_test.go b/integration/subject_mappings_test.go new file mode 100644 index 0000000000..f1108f86cd --- /dev/null +++ b/integration/subject_mappings_test.go @@ -0,0 +1,75 @@ +package integration + +import ( + "context" + "log/slog" + "testing" + + "github.com/opentdf/opentdf-v2-poc/sdk/common" + "github.com/opentdf/opentdf-v2-poc/sdk/subjectmapping" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" +) + +type SubjectMappingsSuite struct { + suite.Suite + schema string + f Fixtures + db DBInterface + ctx context.Context +} + +func (s *SubjectMappingsSuite) SetupSuite() { + slog.Info("setting up db.SubjectMappings test suite") + s.ctx = context.Background() + s.schema = "test_opentdf_subject_mappings" + s.db = NewDBInterface(s.schema) + s.f = NewFixture(s.db) + s.f.Provision() +} + +func (s *SubjectMappingsSuite) TearDownSuite() { + slog.Info("tearing down db.SubjectMappings test suite") + s.f.TearDown() +} + +func (s *SubjectMappingsSuite) Test_CreateSubjectMapping() { + metadata := &common.MetadataMutable{} + + attrValue := fixtures.GetAttributeValueKey("example.com/attr/attr1/value/value1") + mapping := &subjectmapping.SubjectMappingCreateUpdate{ + AttributeValueId: attrValue.Id, + Operator: subjectmapping.SubjectMappingOperatorEnum_SUBJECT_MAPPING_OPERATOR_ENUM_IN, + SubjectAttribute: "subject_attribute--test", + SubjectValues: []string{"subject_attribute_values--test1", "subject_attribute_values--test2"}, + Metadata: metadata, + } + createdMapping, err := s.db.Client.CreateSubjectMapping(s.ctx, mapping) + assert.Nil(s.T(), err) + assert.NotNil(s.T(), createdMapping) +} + +func (s *SubjectMappingsSuite) Test_GetSubjectMapping() { + attrValue := fixtures.GetAttributeValueKey("example.com/attr/attr1/value/value1") + mapping := &subjectmapping.SubjectMappingCreateUpdate{ + AttributeValueId: attrValue.Id, + Operator: subjectmapping.SubjectMappingOperatorEnum_SUBJECT_MAPPING_OPERATOR_ENUM_IN, + SubjectAttribute: "subject_attribute--test", + SubjectValues: []string{"subject_attribute_values--test1", "subject_attribute_values--test2"}, + Metadata: &common.MetadataMutable{}, + } + createdMapping, err := s.db.Client.CreateSubjectMapping(s.ctx, mapping) + assert.Nil(s.T(), err) + assert.NotNil(s.T(), createdMapping) + + gotMapping, err := s.db.Client.GetSubjectMapping(s.ctx, createdMapping.Id) + assert.Nil(s.T(), err) + assert.NotNil(s.T(), gotMapping) +} + +func TestSubjectMappingSuite(t *testing.T) { + if testing.Short() { + t.Skip("skipping attributes integration tests") + } + suite.Run(t, new(SubjectMappingsSuite)) +} diff --git a/internal/db/db.go b/internal/db/db.go index 8f49a6a54c..de0134bd47 100644 --- a/internal/db/db.go +++ b/internal/db/db.go @@ -13,8 +13,6 @@ import ( ) var ( - 
Schema = "opentdf" - TableAttributes = "attribute_definitions" TableAttributeValues = "attribute_values" TableNamespaces = "attribute_namespaces" @@ -25,6 +23,48 @@ var ( TableSubjectMappings = "subject_mappings" ) +var Tables struct { + Attributes Table + AttributeValues Table + Namespaces Table + KeyAccessServerRegistry Table + AttributeKeyAccessGrants Table + AttributeValueKeyAccessGrants Table + ResourceMappings Table + SubjectMappings Table +} + +type Table struct { + name string + schema string + withSchema bool +} + +func NewTable(name string, schema string) Table { + return Table{ + name: name, + schema: schema, + withSchema: true, + } +} + +func (t Table) WithoutSchema() Table { + nT := NewTable(t.name, t.schema) + nT.withSchema = false + return nT +} + +func (t Table) Name() string { + if t.withSchema { + return t.schema + "." + string(t.name) + } + return string(t.name) +} + +func (t Table) Field(field string) string { + return t.Name() + "." + field +} + // We can rename this but wanted to get mocks working. type PgxIface interface { Acquire(ctx context.Context) (*pgxpool.Conn, error) @@ -44,11 +84,22 @@ type Config struct { Password string `yaml:"password" default:"changeme"` RunMigrations bool `yaml:"runMigrations" default:"true"` SSLMode string `yaml:"sslmode" default:"prefer"` + Schema string `yaml:"schema" default:"opentdf"` } type Client struct { PgxIface config Config + Tables struct { + Attributes Table + AttributeValues Table + Namespaces Table + KeyAccessServerRegistry Table + AttributeKeyAccessGrants Table + AttributeValueKeyAccessGrants Table + ResourceMappings Table + SubjectMappings Table + } } func NewClient(config Config) (*Client, error) { @@ -56,6 +107,16 @@ func NewClient(config Config) (*Client, error) { if err != nil { return nil, fmt.Errorf("failed to create pgxpool: %w", err) } + + Tables.Attributes = NewTable(TableAttributes, config.Schema) + Tables.AttributeValues = NewTable(TableAttributeValues, config.Schema) + Tables.Namespaces = NewTable(TableNamespaces, config.Schema) + Tables.KeyAccessServerRegistry = NewTable(TableKeyAccessServerRegistry, config.Schema) + Tables.AttributeKeyAccessGrants = NewTable(TableAttributeKeyAccessGrants, config.Schema) + Tables.AttributeValueKeyAccessGrants = NewTable(TableAttributeValueKeyAccessGrants, config.Schema) + Tables.ResourceMappings = NewTable(TableResourceMappings, config.Schema) + Tables.SubjectMappings = NewTable(TableSubjectMappings, config.Schema) + return &Client{ PgxIface: pool, config: config, @@ -111,7 +172,7 @@ func newStatementBuilder() sq.StatementBuilderType { } func tableName(table string) string { - return Schema + "." 
+ table + return table } func tableField(table string, field string) string { diff --git a/internal/db/db_migration.go b/internal/db/db_migration.go index 6884d9b132..fc27875a7d 100644 --- a/internal/db/db_migration.go +++ b/internal/db/db_migration.go @@ -24,6 +24,11 @@ func (c *Client) RunMigrations() (int, error) { return applied, nil } + // create the schema + c.Exec(context.Background(), fmt.Sprintf("CREATE SCHEMA IF NOT EXISTS %s", c.config.Schema)) + // set the search path + c.Exec(context.Background(), fmt.Sprintf("SET search_path TO %s", c.config.Schema)) + pool, ok := c.PgxIface.(*pgxpool.Pool) if !ok || pool == nil { return applied, fmt.Errorf("failed to cast pgxpool.Pool") diff --git a/internal/db/subject_mappings.go b/internal/db/subject_mappings.go index acb6514494..dcec586e7b 100644 --- a/internal/db/subject_mappings.go +++ b/internal/db/subject_mappings.go @@ -12,8 +12,7 @@ import ( "google.golang.org/protobuf/encoding/protojson" ) -var SubjectMappingTable = tableName(TableSubjectMappings) -var SubjectMappingOperatorEnumPrefix = "SUBJECT_MAPPINGS_OPERATOR_ENUM_" +var SubjectMappingOperatorEnumPrefix = "SUBJECT_MAPPING_OPERATOR_ENUM_" func subjectMappingOperatorEnumTransformIn(value string) string { return strings.TrimPrefix(value, SubjectMappingOperatorEnumPrefix) @@ -24,22 +23,23 @@ func subjectMappingOperatorEnumTransformOut(value string) subjectmapping.Subject } func subjectMappingSelect() sq.SelectBuilder { + t := Tables.SubjectMappings + aT := Tables.AttributeValues return newStatementBuilder().Select( - tableField(SubjectMappingTable, "id"), - tableField(SubjectMappingTable, "operator"), - tableField(SubjectMappingTable, "subject_attribute"), - tableField(SubjectMappingTable, "subject_attribute_values"), - tableField(SubjectMappingTable, "metadata"), - "JSON_AGG("+ - "JSON_BUILD_OBJECT("+ - "'id', "+tableField(AttributeValueTable, "id")+", "+ - "'value', "+tableField(AttributeValueTable, "value")+","+ - "'members', "+tableField(AttributeValueTable, "members")+ - ")"+ + t.Field("id"), + t.Field("operator"), + t.Field("subject_attribute"), + t.Field("subject_attribute_values"), + t.Field("metadata"), + "JSON_BUILD_OBJECT("+ + "'id', "+aT.Field("id")+", "+ + "'value', "+aT.Field("value")+","+ + "'members', "+aT.Field("members")+ ") AS attribute_value", ). - LeftJoin(AttributeValueTable + " ON " + tableField(AttributeValueTable, "id") + " = " + tableField(SubjectMappingTable, "id")). - GroupBy(tableField(SubjectMappingTable, "id")) + LeftJoin(aT.Name() + " ON " + t.Field("id") + " = " + t.Field("id")). + GroupBy(t.Field("id")). + GroupBy(aT.Field("id")) } func subjectMappingHydrateItem(row pgx.Row) (*subjectmapping.SubjectMapping, error) { @@ -72,8 +72,8 @@ func subjectMappingHydrateItem(row pgx.Row) (*subjectmapping.SubjectMapping, err } v := &attributes.Value{} - if metadataJson != nil { - if err := protojson.Unmarshal(metadataJson, v); err != nil { + if attributeValueJson != nil { + if err := protojson.Unmarshal(attributeValueJson, v); err != nil { return nil, err } } @@ -106,8 +106,9 @@ func subjectMappingHydrateList(rows pgx.Rows) ([]*subjectmapping.SubjectMapping, /// func createSubjectMappingSql(attribute_value_id string, operator string, subject_attribute string, subject_attribute_values []string, metadata []byte) (string, []interface{}, error) { + t := Tables.SubjectMappings return newStatementBuilder(). - Insert(SubjectMappingTable). + Insert(t.Name()). 
Columns( "attribute_value_id", "operator", @@ -160,9 +161,10 @@ func (c *Client) CreateSubjectMapping(ctx context.Context, s *subjectmapping.Sub } func getSubjectMappingSql(id string) (string, []interface{}, error) { + t := Tables.SubjectMappings return subjectMappingSelect(). - From(SubjectMappingTable). - Where(sq.Eq{"id": id}). + From(t.Name()). + Where(sq.Eq{t.Field("id"): id}). ToSql() } func (c *Client) GetSubjectMapping(ctx context.Context, id string) (*subjectmapping.SubjectMapping, error) { @@ -182,8 +184,9 @@ func (c *Client) GetSubjectMapping(ctx context.Context, id string) (*subjectmapp } func listSubjectMappingsSql() (string, []interface{}, error) { + t := Tables.SubjectMappings return subjectMappingSelect(). - From(SubjectMappingTable). + From(t.Name()). ToSql() } func (c *Client) ListSubjectMappings(ctx context.Context) ([]*subjectmapping.SubjectMapping, error) { @@ -207,8 +210,9 @@ func (c *Client) ListSubjectMappings(ctx context.Context) ([]*subjectmapping.Sub } func updateSubjectMappingSql(id string, attribute_value_id string, operator string, subject_attribute string, subject_attribute_values []string, metadata []byte) (string, []interface{}, error) { + t := Tables.SubjectMappings sb := newStatementBuilder(). - Update(SubjectMappingTable) + Update(t.Name()) if attribute_value_id != "" { sb.Set("attribute_value_id", attribute_value_id) @@ -261,8 +265,9 @@ func (c *Client) UpdateSubjectMapping(ctx context.Context, id string, s *subject } func deleteSubjectMappingSql(id string) (string, []interface{}, error) { + t := Tables.SubjectMappings return newStatementBuilder(). - Delete(SubjectMappingTable). + Delete(t.Name()). Where(sq.Eq{"id": id}). ToSql() } diff --git a/internal/db/subject_mappings_test.go b/internal/db/subject_mappings_test.go new file mode 100644 index 0000000000..83990b872e --- /dev/null +++ b/internal/db/subject_mappings_test.go @@ -0,0 +1,54 @@ +package db + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" +) + +type SubjectMappingTestSuite struct { + suite.Suite + db *Client +} + +func (suite *SubjectMappingTestSuite) SetupSuite() { +} + +func (suite *SubjectMappingTestSuite) TearDownSuite() { +} + +func (suite *SubjectMappingTestSuite) TestCreateSubjectMappingSql() { + expectedColumns := strings.Join([]string{ + "attribute_value_id", + "operator", + "subject_attribute", + "subject_attribute_values", + "metadata", + }, ",") + expectedValues := []interface{}{ + "attribute_value_id--test", + "operator--test", + "subject_attribute--test", + []string{"subject_attribute_values--test1", "subject_attribute_values--test2"}, + []byte("a"), + } + + sql, args, err := createSubjectMappingSql( + expectedValues[0].(string), + expectedValues[1].(string), + expectedValues[2].(string), + expectedValues[3].([]string), + expectedValues[4].([]byte), + ) + + assert.Nil(suite.T(), err) + assert.Contains(suite.T(), sql, "INSERT INTO "+SubjectMappingTable+" ("+expectedColumns+")") + assert.Contains(suite.T(), sql, "VALUES ($1,$2,$3,$4,$5)") + assert.Equal(suite.T(), expectedValues, args) +} + +func TestDBSubjectMappingTestSuite(t *testing.T) { + suite.Run(t, new(SubjectMappingTestSuite)) +} diff --git a/migrations/20230101000000_create_schema.sql b/migrations/20230101000000_create_schema.sql new file mode 100644 index 0000000000..34766eb21b --- /dev/null +++ b/migrations/20230101000000_create_schema.sql @@ -0,0 +1,12 @@ +-- +goose Up +-- +goose StatementBegin + +-- +-- schema creation is dynamic and is not included 
in this file +-- + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin +-- +goose StatementEnd diff --git a/migrations/20231208092252_create_opentdf.sql b/migrations/20231208092252_create_opentdf.sql index 2379e8ad56..3dba7a8cca 100644 --- a/migrations/20231208092252_create_opentdf.sql +++ b/migrations/20231208092252_create_opentdf.sql @@ -2,9 +2,7 @@ -- +goose StatementBegin SELECT 'up SQL query'; -CREATE SCHEMA IF NOT EXISTS opentdf; - -CREATE TABLE IF NOT EXISTS opentdf.resources +CREATE TABLE IF NOT EXISTS resources ( id SERIAL PRIMARY KEY, name VARCHAR NOT NULL, @@ -21,5 +19,5 @@ CREATE TABLE IF NOT EXISTS opentdf.resources -- +goose Down -- +goose StatementBegin -SELECT 'down SQL query'; +DROP TABLE IF EXISTS resources; -- +goose StatementEnd diff --git a/migrations/20240118000000_create_new_tables.sql b/migrations/20240118000000_create_new_tables.sql index 51f49e70dd..fad6360869 100644 --- a/migrations/20240118000000_create_new_tables.sql +++ b/migrations/20240118000000_create_new_tables.sql @@ -1,38 +1,37 @@ -- +goose Up -- +goose StatementBegin -CREATE SCHEMA IF NOT EXISTS opentdf; CREATE TYPE attribute_definition_rule AS ENUM ('UNSPECIFIED', 'ALL_OF', 'ANY_OF', 'HIERARCHY'); CREATE TYPE subject_mappings_operator AS ENUM ('UNSPECIFIED', 'IN', 'NOT_IN'); -CREATE TABLE IF NOT EXISTS opentdf.attribute_namespaces +CREATE TABLE IF NOT EXISTS attribute_namespaces ( -- generate on create id UUID PRIMARY KEY DEFAULT gen_random_uuid(), name VARCHAR NOT NULL UNIQUE ); -CREATE TABLE IF NOT EXISTS opentdf.attribute_definitions +CREATE TABLE IF NOT EXISTS attribute_definitions ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - namespace_id UUID NOT NULL REFERENCES opentdf.attribute_namespaces(id), + namespace_id UUID NOT NULL REFERENCES attribute_namespaces(id), name VARCHAR NOT NULL, rule attribute_definition_rule NOT NULL, metadata JSONB, UNIQUE (namespace_id, name) ); -CREATE TABLE IF NOT EXISTS opentdf.attribute_values +CREATE TABLE IF NOT EXISTS attribute_values ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - attribute_definition_id UUID NOT NULL REFERENCES opentdf.attribute_definitions(id), + attribute_definition_id UUID NOT NULL REFERENCES attribute_definitions(id), value VARCHAR NOT NULL, members UUID[] NOT NULL, metadata JSONB, UNIQUE (attribute_definition_id, value) ); -CREATE TABLE IF NOT EXISTS opentdf.key_access_servers +CREATE TABLE IF NOT EXISTS key_access_servers ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), key_access_server VARCHAR NOT NULL UNIQUE, @@ -40,32 +39,32 @@ CREATE TABLE IF NOT EXISTS opentdf.key_access_servers metadata JSONB ); -CREATE TABLE IF NOT EXISTS opentdf.attribute_definition_key_access_grants +CREATE TABLE IF NOT EXISTS attribute_definition_key_access_grants ( - attribute_definition_id UUID NOT NULL REFERENCES opentdf.attribute_definitions(id), - key_access_server_id UUID NOT NULL REFERENCES opentdf.key_access_servers(id), + attribute_definition_id UUID NOT NULL REFERENCES attribute_definitions(id), + key_access_server_id UUID NOT NULL REFERENCES key_access_servers(id), PRIMARY KEY (attribute_definition_id, key_access_server_id) ); -CREATE TABLE IF NOT EXISTS opentdf.attribute_value_key_access_grants +CREATE TABLE IF NOT EXISTS attribute_value_key_access_grants ( - attribute_value_id UUID NOT NULL REFERENCES opentdf.attribute_values(id), - key_access_server_id UUID NOT NULL REFERENCES opentdf.key_access_servers(id), + attribute_value_id UUID NOT NULL REFERENCES attribute_values(id), + key_access_server_id UUID NOT NULL REFERENCES 
key_access_servers(id), PRIMARY KEY (attribute_value_id, key_access_server_id) ); -CREATE TABLE IF NOT EXISTS opentdf.resource_mappings +CREATE TABLE IF NOT EXISTS resource_mappings ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - attribute_value_id UUID NOT NULL REFERENCES opentdf.attribute_values(id), + attribute_value_id UUID NOT NULL REFERENCES attribute_values(id), terms VARCHAR[], metadata JSONB ); -CREATE TABLE IF NOT EXISTS opentdf.subject_mappings +CREATE TABLE IF NOT EXISTS subject_mappings ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - attribute_value_id UUID NOT NULL REFERENCES opentdf.attribute_values(id), + attribute_value_id UUID NOT NULL REFERENCES attribute_values(id), operator subject_mappings_operator NOT NULL, subject_attribute VARCHAR NOT NULL, subject_attribute_values VARCHAR[], @@ -75,14 +74,14 @@ CREATE TABLE IF NOT EXISTS opentdf.subject_mappings -- +goose Down -- +goose StatementBegin -DROP TABLE IF EXISTS opentdf.subject_mappings; -DROP TABLE IF EXISTS opentdf.resource_mappings; -DROP TABLE IF EXISTS opentdf.attribute_value_key_access_grants; -DROP TABLE IF EXISTS opentdf.attribute_definition_key_access_grants; -DROP TABLE IF EXISTS opentdf.key_access_servers; -DROP TABLE IF EXISTS opentdf.attribute_values; -DROP TABLE IF EXISTS opentdf.attribute_definitions; -DROP TABLE IF EXISTS opentdf.attribute_namespaces; +DROP TABLE IF EXISTS subject_mappings; +DROP TABLE IF EXISTS resource_mappings; +DROP TABLE IF EXISTS attribute_value_key_access_grants; +DROP TABLE IF EXISTS attribute_definition_key_access_grants; +DROP TABLE IF EXISTS key_access_servers; +DROP TABLE IF EXISTS attribute_values; +DROP TABLE IF EXISTS attribute_definitions; +DROP TABLE IF EXISTS attribute_namespaces; DROP TYPE attribute_definition_rule; DROP TYPE subject_mappings_operator; diff --git a/sdk/sdk.go b/sdk/sdk.go index 49a5cd91e2..445e896b6a 100644 --- a/sdk/sdk.go +++ b/sdk/sdk.go @@ -4,7 +4,7 @@ import ( "errors" "github.com/opentdf/opentdf-v2-poc/sdk/attributes" - "github.com/opentdf/opentdf-v2-poc/sdk/keyaccessserverregistry" + "github.com/opentdf/opentdf-v2-poc/sdk/kasregistry" "github.com/opentdf/opentdf-v2-poc/sdk/namespaces" "github.com/opentdf/opentdf-v2-poc/sdk/resourcemapping" "github.com/opentdf/opentdf-v2-poc/sdk/subjectmapping" @@ -29,7 +29,7 @@ type SDK struct { Attributes attributes.AttributesServiceClient ResourceMapping resourcemapping.ResourceMappingServiceClient SubjectMapping subjectmapping.SubjectMappingServiceClient - KeyAccessServerRegistry keyaccessserverregistry.KeyAccessServerRegistryServiceClient + KeyAccessServerRegistry kasregistry.KeyAccessServerRegistryServiceClient } func New(platformEndpoint string, opts ...Option) (*SDK, error) { @@ -54,7 +54,7 @@ func New(platformEndpoint string, opts ...Option) (*SDK, error) { Namespaces: namespaces.NewNamespaceServiceClient(conn), ResourceMapping: resourcemapping.NewResourceMappingServiceClient(conn), SubjectMapping: subjectmapping.NewSubjectMappingServiceClient(conn), - KeyAccessServerRegistry: keyaccessserverregistry.NewKeyAccessServerRegistryServiceClient(conn), + KeyAccessServerRegistry: kasregistry.NewKeyAccessServerRegistryServiceClient(conn), }, nil } diff --git a/tests/acre_test.go b/tests/acre_test.go deleted file mode 100644 index d55d45ff50..0000000000 --- a/tests/acre_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package tests - -import ( - "testing" - - "github.com/stretchr/testify/suite" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" -) - -type AcreSuite struct { - suite.Suite - 
conn *grpc.ClientConn - // client acre.ResourceEncodingServiceClient -} - -func (suite *AcreSuite) SetupSuite() { - conn, err := grpc.Dial("localhost:9000", grpc.WithTransportCredentials(insecure.NewCredentials())) - if err != nil { - suite.T().Fatal(err) - } - suite.conn = conn - - // suite.client = acre.NewResourceEncodingServiceClient(conn) -} - -func (suite *AcreSuite) TearDownSuite() { - suite.conn.Close() -} - -func TestAcreSuite(t *testing.T) { - if testing.Short() { - t.Skip("skipping acre integration tests") - } - suite.Run(t, new(AcreSuite)) -} diff --git a/tests/attributes_test.go b/tests/attributes_test.go deleted file mode 100644 index de12a647b5..0000000000 --- a/tests/attributes_test.go +++ /dev/null @@ -1,171 +0,0 @@ -package tests - -import ( - "github.com/opentdf/opentdf-v2-poc/sdk/attributes" - - "github.com/stretchr/testify/suite" - "google.golang.org/grpc" -) - -const ( - definitionsTestData = "testdata/attributes/attribute_definitions.json" -) - -type AttributesSuite struct { - suite.Suite - conn *grpc.ClientConn - client attributes.AttributesServiceClient -} - -// func (suite *AttributesSuite) SetupSuite() { -// ctx := context.Background() -// conn, err := grpc.Dial("localhost:9000", grpc.WithTransportCredentials(insecure.NewCredentials())) -// if err != nil { -// slog.Error("could not connect", slog.String("error", err.Error())) -// suite.T().Fatal(err) -// } -// suite.conn = conn - -// suite.client = attributes.NewAttributesServiceClient(conn) - -// testData, err := os.ReadFile(definitionsTestData) -// if err != nil { -// slog.Error("could not read attributes.json", slog.String("error", err.Error())) -// suite.T().Fatal(err) -// } - -// var attrs = make([]*attributes.Attribute, 0) - -// err = json.Unmarshal(testData, &attrs) - -// if err != nil { -// slog.Error("could not unmarshal attributes.json", slog.String("error", err.Error())) -// suite.T().Fatal(err) -// } - -// for _, attr := range attrs { -// _, err = suite.client.CreateAttribute(ctx, &attributes.CreateAttributeRequest{ -// Attribute: attr, -// }) -// if err != nil { -// slog.Error("could not create attribute", slog.String("error", err.Error())) -// suite.T().Fatal(err) -// } -// } -// slog.Info("loaded attributes test data") -// } - -// func (suite *AttributesSuite) TearDownSuite() { -// slog.Info("tearing down attributes test suite") -// defer suite.conn.Close() -// } - -// func TestAttributeSuite(t *testing.T) { -// if testing.Short() { -// t.Skip("skipping attributes integration tests") -// } -// suite.Run(t, new(AttributesSuite)) -// } - -// func (suite *AttributesSuite) Test_CreateAttribute_Returns_Success_When_Valid_Definition() { -// definition := attributes.AttributeDefinition{ -// Name: "relto", -// Rule: attributes.AttributeDefinition_ATTRIBUTE_RULE_TYPE_ANY_OF, -// Values: []*attributes.AttributeDefinitionValue{ -// { -// Value: "USA", -// }, -// { -// Value: "GBR", -// }, -// }, -// Descriptor_: &common.ResourceDescriptor{ -// Version: 1, -// Namespace: "virtru.com", -// Name: "relto", -// Type: common.PolicyResourceType_POLICY_RESOURCE_TYPE_ATTRIBUTE_DEFINITION, -// }, -// } - -// _, err := suite.client.CreateAttribute(context.Background(), &attributes.CreateAttributeRequest{ -// Definition: &definition, -// }) - -// assert.Nil(suite.T(), err) -// } - -// func (suite *AttributesSuite) Test_CreateAttribute_Returns_BadRequest_When_InvalidRuleType() { -// definition := attributes.AttributeDefinition{ -// Name: "relto", -// Rule: 543, -// Values: []*attributes.AttributeDefinitionValue{ -// { -// 
Value: "USA", -// }, -// { -// Value: "GBR", -// }, -// }, -// Descriptor_: &common.ResourceDescriptor{ -// Version: 1, -// Namespace: "virtru.com", -// Name: "relto", -// Type: common.PolicyResourceType_POLICY_RESOURCE_TYPE_ATTRIBUTE_DEFINITION, -// }, -// } - -// _, err := suite.client.CreateAttribute(context.Background(), &attributes.CreateAttributeRequest{ -// Definition: &definition, -// }) - -// if assert.Error(suite.T(), err) { -// st, _ := status.FromError(err) -// assert.Equal(suite.T(), codes.InvalidArgument, st.Code()) -// assert.Equal(suite.T(), st.Message(), "validation error:\n - definition.rule: value must be one of the defined enum values [enum.defined_only]") -// } -// } - -// func (suite *AttributesSuite) Test_CreateAttribute_Returns_BadRequest_When_InvalidNamespace() { -// definition := attributes.AttributeDefinition{ -// Name: "relto", -// Rule: attributes.AttributeDefinition_ATTRIBUTE_RULE_TYPE_ANY_OF, -// Values: []*attributes.AttributeDefinitionValue{ -// { -// Value: "USA", -// }, -// { -// Value: "GBR", -// }, -// }, -// Descriptor_: &common.ResourceDescriptor{ -// Version: 1, -// Namespace: "virtru", -// Name: "relto", -// Type: common.PolicyResourceType_POLICY_RESOURCE_TYPE_ATTRIBUTE_DEFINITION, -// }, -// } - -// _, err := suite.client.CreateAttribute(context.Background(), &attributes.CreateAttributeRequest{ -// Definition: &definition, -// }) - -// if assert.Error(suite.T(), err) { -// st, _ := status.FromError(err) -// assert.Equal(suite.T(), codes.InvalidArgument, st.Code()) -// assert.Equal(suite.T(), st.Message(), "validation error:\n - definition.descriptor.namespace: Namespace must be a valid hostname. It should include at least one dot, with each segment (label) starting and ending with an alphanumeric character. Each label must be 1 to 63 characters long, allowing hyphens but not as the first or last character. The top-level domain (the last segment after the final dot) must consist of at least two alphabetic characters. 
[namespace_format]") -// } -// } - -// func (suite *AttributesSuite) Test_GetAttribute_Returns_NotFound_When_ID_Does_Not_Exist() { -// definition, err := suite.client.GetAttribute(context.Background(), &attributes.GetAttributeRequest{ -// Id: 10000, -// }) -// assert.Nil(suite.T(), definition) -// assert.NotNil(suite.T(), err) - -// if assert.Error(suite.T(), err) { -// st, _ := status.FromError(err) -// assert.Equal(suite.T(), codes.NotFound, st.Code()) -// assert.Equal(suite.T(), st.Message(), services.ErrNotFound) -// } -// } diff --git a/tests/main_test.go b/tests/main_test.go deleted file mode 100644 index 56f61f97f9..0000000000 --- a/tests/main_test.go +++ /dev/null @@ -1,135 +0,0 @@ -package tests - -import ( - "context" - "log/slog" - "os" - "testing" - "time" - - "github.com/creasty/defaults" - "github.com/opentdf/opentdf-v2-poc/cmd" - "github.com/opentdf/opentdf-v2-poc/internal/config" - "github.com/opentdf/opentdf-v2-poc/internal/db" - "github.com/opentdf/opentdf-v2-poc/internal/opa" - "github.com/opentdf/opentdf-v2-poc/internal/server" - tc "github.com/testcontainers/testcontainers-go" - "github.com/testcontainers/testcontainers-go/wait" -) - -func TestMain(m *testing.M) { - ctx := context.Background() - conf := &config.Config{} - - if err := defaults.Set(conf); err != nil { - slog.Error("could not set defaults", slog.String("error", err.Error())) - os.Exit(1) - } - - /* - For podman - export TESTCONTAINERS_RYUK_CONTAINER_PRIVILEGED=true; # needed to run Reaper (alternative disable it TESTCONTAINERS_RYUK_DISABLED=true) - export TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE=/var/run/docker.sock; # needed to apply the bind with statfs - */ - - var providerType tc.ProviderType - - if os.Getenv("TESTCONTAINERS_PODMAN") == "true" { - providerType = tc.ProviderPodman - } else { - providerType = tc.ProviderDocker - } - - req := tc.GenericContainerRequest{ - ProviderType: providerType, - ContainerRequest: tc.ContainerRequest{ - Image: "postgres:13.3", - Name: "testcontainer-postgres", - ExposedPorts: []string{"5432/tcp"}, - - Env: map[string]string{ - "POSTGRES_USER": conf.DB.User, - "POSTGRES_PASSWORD": conf.DB.Password, - "POSTGRES_DB": conf.DB.Database, - }, - - WaitingFor: wait.ForExec([]string{"pg_isready", "-h", "localhost", "-U", conf.DB.User}).WithStartupTimeout(120 * time.Second), - }, - Started: true, - } - - postgres, err := tc.GenericContainer(context.Background(), req) - if err != nil { - slog.Error("could not start postgres container", slog.String("error", err.Error())) - panic(err) - } - - // Cleanup the container - defer func() { - if err := postgres.Terminate(ctx); err != nil { - slog.Error("could not stop postgres container", slog.String("error", err.Error())) - return - } - - if err := recover(); err != nil { - os.Exit(1) - } - }() - - port, err := postgres.MappedPort(ctx, "5432/tcp") - if err != nil { - slog.Error("could not get postgres mapped port", slog.String("error", err.Error())) - panic(err) - } - - conf.DB.Host = "127.0.0.1" - // if err != nil { - // slog.Error("could not get postgres host", slog.String("error", err.Error())) - // panic(err) - // } - conf.DB.Port = port.Int() - - dbClient, err := db.NewClient(conf.DB) - if err != nil { - slog.Error("issue creating database client", slog.String("error", err.Error())) - panic(err) - } - - applied, err := dbClient.RunMigrations() - if err != nil { - slog.Error("issue running migrations", slog.String("error", err.Error())) - panic(err) - } - - slog.Info("applied migrations", slog.Int("count", applied)) - - otdf, err := 
server.NewOpenTDFServer(conf.Server) - if err != nil { - slog.Error("issue creating opentdf server", slog.String("error", err.Error())) - panic(err) - } - defer otdf.Stop() - - slog.Info("starting opa engine") - // Start the opa engine - conf.OPA.Embedded = true - eng, err := opa.NewEngine(conf.OPA) - if err != nil { - slog.Error("could not start opa engine", slog.String("error", err.Error())) - panic(err) - } - defer eng.Stop(context.Background()) - - // Register the services - err = cmd.RegisterServices(*conf, otdf, dbClient, eng) - if err != nil { - slog.Error("issue registering services", slog.String("error", err.Error())) - panic(err) - } - - // Start the server - slog.Info("starting opentdf server", slog.Int("grpcPort", conf.Server.Grpc.Port), slog.Int("httpPort", conf.Server.HTTP.Port)) - otdf.Run() - - m.Run() -} diff --git a/tests/subjectmapping_test.go b/tests/subjectmapping_test.go deleted file mode 100644 index 0795012af0..0000000000 --- a/tests/subjectmapping_test.go +++ /dev/null @@ -1,37 +0,0 @@ -package tests - -import ( - "testing" - - "github.com/opentdf/opentdf-v2-poc/sdk/subjectmapping" - "github.com/stretchr/testify/suite" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" -) - -type SubjectMappingSuite struct { - suite.Suite - conn *grpc.ClientConn - client subjectmapping.SubjectMappingServiceClient -} - -func (suite *SubjectMappingSuite) SetupSuite() { - conn, err := grpc.Dial("localhost:9000", grpc.WithTransportCredentials(insecure.NewCredentials())) - if err != nil { - suite.T().Fatal(err) - } - suite.conn = conn - - suite.client = subjectmapping.NewSubjectMappingServiceClient(conn) -} - -func (suite *SubjectMappingSuite) TearDownSuite() { - suite.conn.Close() -} - -func TestSubjectMappingSuite(t *testing.T) { - if testing.Short() { - t.Skip("skipping subject mapping integration tests") - } - suite.Run(t, new(SubjectMappingSuite)) -} diff --git a/tests/testdata/acre/resource_groups.json b/tests/testdata/acre/resource_groups.json deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/testdata/acre/resource_mappings.json b/tests/testdata/acre/resource_mappings.json deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/testdata/acre/resource_synonyms.json b/tests/testdata/acre/resource_synonyms.json deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/testdata/acse/subject_mappings.json b/tests/testdata/acse/subject_mappings.json deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/testdata/attributes/attribute_definitions.json b/tests/testdata/attributes/attribute_definitions.json deleted file mode 100644 index 762314b9db..0000000000 --- a/tests/testdata/attributes/attribute_definitions.json +++ /dev/null @@ -1,53 +0,0 @@ -[ - - { - "name": "architecture", - "rule":2, - "values": [ - { - "value": "collaborator" - }, - { - "value": "contributor" - }, - { - "value": "owner" - } - ], - "descriptor": { - "labels": { - "group": "architecture", - "owner": "virtru" - }, - "description": "this is a test attribute engineering", - "namespace": "virtru.com", - "name": "architecture", - "type":7 - } - }, - { - "name": "engineering", - "rule":2, - "values": [ - { - "value": "collaborator" - }, - { - "value": "contributor" - }, - { - "value": "owner" - } - ], - "descriptor": { - "labels": { - "group": "engineering", - "owner": "virtru" - }, - "description": "this is a test attribute engineering", - "namespace": "virtru.com", - "name": "engineering", - "type":7 - } - } -] \ No 
newline at end of file diff --git a/tests/testdata/attributes/attribute_groups.json b/tests/testdata/attributes/attribute_groups.json deleted file mode 100644 index e69de29bb2..0000000000
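
The integration fixtures in `integration/db.go` build literal SQL rather than bound parameters: values go through `StringWrap`, `StringArrayWrap`, and `UUIDArrayWrap`, and `ExecInsert` joins them into a multi-row `INSERT`. A minimal, self-contained sketch of the statement that provisioning one `attribute_values` fixture row would produce (the helper bodies are copied from the diff; the schema, IDs, and row values are illustrative only):

```go
package main

import (
	"fmt"
	"strings"
)

// Helpers mirrored from integration/db.go: fixture values are wrapped as SQL
// literals, which is acceptable here because the data comes from fixtures.yaml.
func stringWrap(v string) string { return "'" + v + "'" }

func stringArrayWrap(values []string) string {
	var vs []string
	for _, v := range values {
		vs = append(vs, stringWrap(v))
	}
	return "ARRAY [" + strings.Join(vs, ",") + "]"
}

func uuidArrayWrap(v []string) string { return "(" + stringArrayWrap(v) + ")" + "::uuid[]" }

func main() {
	// Illustrative fixture row for the attribute_values table.
	table := "test_opentdf.attribute_values"
	columns := []string{"id", "attribute_definition_id", "value", "members"}
	row := []string{
		stringWrap("00000000-0000-0000-0000-000000000001"),
		stringWrap("00000000-0000-0000-0000-000000000000"),
		stringWrap("value2"),
		uuidArrayWrap([]string{"00000000-0000-0000-0000-000000000002"}),
	}

	// ExecInsert composes the same shape: INSERT INTO <schema.table> (<cols>) VALUES (<row>),...
	sql := "INSERT INTO " + table +
		" (" + strings.Join(columns, ",") + ")" +
		" VALUES " + "(" + strings.Join(row, ",") + ")"
	fmt.Println(sql)
}
```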
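
In `internal/db/db.go` the hard-coded `Schema = "opentdf"` global is replaced by a per-client `Config.Schema` plus a `Table` helper that qualifies table and column names. A small runnable sketch of how those identifiers compose (the type and methods mirror the diff; the schema value here is illustrative, the real one comes from `db.Config.Schema`):

```go
package main

import "fmt"

// Table mirrors the helper added in internal/db/db.go: it prefixes a table
// name with the client's configured schema instead of a global constant.
type Table struct {
	name       string
	schema     string
	withSchema bool
}

func NewTable(name, schema string) Table {
	return Table{name: name, schema: schema, withSchema: true}
}

// Name returns "schema.table", or just "table" when withSchema is false.
func (t Table) Name() string {
	if t.withSchema {
		return t.schema + "." + t.name
	}
	return t.name
}

// Field returns a fully qualified column reference, e.g. "schema.table.column".
func (t Table) Field(field string) string {
	return t.Name() + "." + field
}

func main() {
	// "test_opentdf" is illustrative; integration suites pass their own schema.
	sm := NewTable("subject_mappings", "test_opentdf")
	fmt.Println(sm.Name())      // test_opentdf.subject_mappings
	fmt.Println(sm.Field("id")) // test_opentdf.subject_mappings.id
}
```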
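
The change to `internal/db/db_migration.go` creates the configured schema and points `search_path` at it before goose runs, which is what lets each integration suite migrate into its own throwaway schema now that the migrations no longer hard-code `opentdf.`. A hedged sketch of that bootstrap step under an assumed `schemaExecer` interface and a print-only fake; the two statements come from the diff, while the interface, helper name, and explicit error handling are illustrative additions:

```go
package main

import (
	"context"
	"fmt"
)

// schemaExecer is an illustrative stand-in for the pgx-backed Client; only the
// Exec call used by the bootstrap matters here.
type schemaExecer interface {
	Exec(ctx context.Context, sql string, args ...interface{}) (int64, error)
}

// ensureSchema mirrors the two statements the diff adds to RunMigrations:
// create the configured schema, then set search_path so goose and later queries
// resolve unqualified table names into it. Error handling here is illustrative;
// the diff discards the Exec results.
func ensureSchema(ctx context.Context, db schemaExecer, schema string) error {
	if _, err := db.Exec(ctx, fmt.Sprintf("CREATE SCHEMA IF NOT EXISTS %s", schema)); err != nil {
		return fmt.Errorf("creating schema %q: %w", schema, err)
	}
	if _, err := db.Exec(ctx, fmt.Sprintf("SET search_path TO %s", schema)); err != nil {
		return fmt.Errorf("setting search_path to %q: %w", schema, err)
	}
	return nil
}

// printExecer echoes the statements so the sketch runs without a database.
type printExecer struct{}

func (printExecer) Exec(_ context.Context, sql string, _ ...interface{}) (int64, error) {
	fmt.Println(sql)
	return 0, nil
}

func main() {
	_ = ensureSchema(context.Background(), printExecer{}, "test_opentdf_subject_mappings")
}
```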