Skip to content

Commit

Permalink
Deprecation cleanup for 2.12 release (kedacore#4806)
Browse files Browse the repository at this point in the history
  • Loading branch information
dttung2905 authored and novicher committed Aug 26, 2023
1 parent ffea119 commit 1352c76
Show file tree
Hide file tree
Showing 31 changed files with 173 additions and 237 deletions.
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ You can find all deprecations in [this overview](https://github.com/kedacore/ked

New deprecation(s):

- TODO ([#XXX](https://github.com/kedacore/keda/issues/XXX))
- **General**: Clean up previously deprecated code for 2.12 release ([#4899](https://github.com/kedacore/keda/issues/4899))

### Breaking Changes

Expand Down
2 changes: 1 addition & 1 deletion apis/keda/v1alpha1/scaledobject_webhook.go
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ func validateWorkload(so *ScaledObject, action string) (admission.Warnings, erro
}

func verifyTriggers(incomingSo *ScaledObject, action string) error {
err := ValidateTriggers(scaledobjectlog.WithValues("name", incomingSo.Name), incomingSo.Spec.Triggers)
err := ValidateTriggers(incomingSo.Spec.Triggers)
if err != nil {
scaledobjectlog.WithValues("name", incomingSo.Name).Error(err, "validation error")
prommetrics.RecordScaledObjectValidatingErrors(incomingSo.Namespace, action, "incorrect-triggers")
Expand Down
10 changes: 1 addition & 9 deletions apis/keda/v1alpha1/scaletriggers_types.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ package v1alpha1
import (
"fmt"

"github.com/go-logr/logr"
autoscalingv2 "k8s.io/api/autoscaling/v2"
)

Expand Down Expand Up @@ -50,7 +49,7 @@ type AuthenticationRef struct {
// ValidateTriggers checks that general trigger metadata are valid, it checks:
// - triggerNames in ScaledObject are unique
// - useCachedMetrics is defined only for a supported triggers
func ValidateTriggers(logger logr.Logger, triggers []ScaleTriggers) error {
func ValidateTriggers(triggers []ScaleTriggers) error {
triggersCount := len(triggers)
if triggers != nil && triggersCount > 0 {
triggerNames := make(map[string]bool, triggersCount)
Expand All @@ -63,13 +62,6 @@ func ValidateTriggers(logger logr.Logger, triggers []ScaleTriggers) error {
}
}

// FIXME: DEPRECATED to be removed in v2.12
_, hasMetricName := trigger.Metadata["metricName"]
// aws-cloudwatch, huawei-cloudeye and azure-monitor have a meaningful use of metricName
if hasMetricName && trigger.Type != "aws-cloudwatch" && trigger.Type != "huawei-cloudeye" && trigger.Type != "azure-monitor" {
logger.Info("\"metricName\" is deprecated and will be removed in v2.12, please do not set it anymore", "trigger.type", trigger.Type)
}

name := trigger.Name
if name != "" {
if _, found := triggerNames[name]; found {
Expand Down
3 changes: 1 addition & 2 deletions apis/keda/v1alpha1/scaletriggers_types_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ package v1alpha1
import (
"testing"

"github.com/go-logr/logr"
"github.com/stretchr/testify/assert"
)

Expand Down Expand Up @@ -90,7 +89,7 @@ func TestValidateTriggers(t *testing.T) {
for _, test := range tests {
tt := test
t.Run(test.name, func(t *testing.T) {
err := ValidateTriggers(logr.Discard(), tt.triggers)
err := ValidateTriggers(tt.triggers)
if test.expectedErrMsg == "" {
assert.NoError(t, err)
} else {
Expand Down
2 changes: 1 addition & 1 deletion controllers/keda/hpa.go
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,7 @@ func (r *ScaledObjectReconciler) getScaledObjectMetricSpecs(ctx context.Context,
if metricSpec.External != nil {
externalMetricName := metricSpec.External.Metric.Name
if kedacontrollerutil.Contains(externalMetricNames, externalMetricName) {
return nil, fmt.Errorf("metricName %s defined multiple times in ScaledObject %s, please refer the documentation how to define metricName manually", externalMetricName, scaledObject.Name)
return nil, fmt.Errorf("metricName %s defined multiple times in ScaledObject %s", externalMetricName, scaledObject.Name)
}

// add the scaledobject.keda.sh/name label. This is how the MetricsAdapter will know which scaledobject a metric is for when the HPA queries it.
Expand Down
2 changes: 1 addition & 1 deletion controllers/keda/scaledobject_controller.go
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,7 @@ func (r *ScaledObjectReconciler) reconcileScaledObject(ctx context.Context, logg
return "ScaledObject doesn't have correct Idle/Min/Max Replica Counts specification", err
}

err = kedav1alpha1.ValidateTriggers(logger, scaledObject.Spec.Triggers)
err = kedav1alpha1.ValidateTriggers(scaledObject.Spec.Triggers)
if err != nil {
return "ScaledObject doesn't have correct triggers specification", err
}
Expand Down
6 changes: 4 additions & 2 deletions controllers/keda/scaledobject_controller_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,8 @@ var _ = Describe("ScaledObjectController", func() {
)

var triggerMeta = []map[string]string{
{"serverAddress": "http://localhost:9090", "metricName": "http_requests_total", "threshold": "100", "query": "up", "disableScaleToZero": "true"},
{"serverAddress": "http://localhost:9090", "metricName": "http_requests_total2", "threshold": "100", "query": "up"},
{"serverAddress": "http://localhost:9090", "threshold": "100", "query": "up", "disableScaleToZero": "true"},
{"serverAddress": "http://localhost:9090", "threshold": "100", "query": "up"},
}

BeforeEach(func() {
Expand Down Expand Up @@ -97,6 +97,7 @@ var _ = Describe("ScaledObjectController", func() {
TriggerMetadata: tm,
ResolvedEnv: nil,
AuthParams: nil,
ScalerIndex: i,
}

s, err := scalers.NewPrometheusScaler(config)
Expand Down Expand Up @@ -221,6 +222,7 @@ var _ = Describe("ScaledObjectController", func() {

// Call function to be tested
metricSpecs, err := metricNameTestReconciler.getScaledObjectMetricSpecs(context.Background(), testLogger, duplicateNamedScaledObject)
Ω(err).ShouldNot(BeNil())

// Test that the status was not updated
Ω(duplicateNamedScaledObject.Status.ExternalMetricNames).Should(BeNil())
Expand Down
1 change: 0 additions & 1 deletion pkg/scalers/azure/azure_blob.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ type BlobMetadata struct {
BlobPrefix string
Connection string
AccountName string
MetricName string
EndpointSuffix string
ScalerIndex int
GlobPattern *glob.Glob
Expand Down
10 changes: 1 addition & 9 deletions pkg/scalers/azure_blob_scaler.go
Original file line number Diff line number Diff line change
Expand Up @@ -142,14 +142,6 @@ func parseAzureBlobMetadata(config *ScalerConfig, logger logr.Logger) (*azure.Bl
if val, ok := config.TriggerMetadata["useAAdPodIdentity"]; ok && config.PodIdentity.Provider == "" && val == stringTrue {
config.PodIdentity = kedav1alpha1.AuthPodIdentity{Provider: kedav1alpha1.PodIdentityProviderAzure}
}

// FIXME: DEPRECATED to be removed in v2.12
if val, ok := config.TriggerMetadata["metricName"]; ok {
meta.MetricName = kedautil.NormalizeString(fmt.Sprintf("azure-blob-%s", val))
} else {
meta.MetricName = kedautil.NormalizeString(fmt.Sprintf("azure-blob-%s", meta.BlobContainerName))
}

// If the Use AAD Pod Identity is not present, or set to "none"
// then check for connection string
switch config.PodIdentity.Provider {
Expand Down Expand Up @@ -188,7 +180,7 @@ func (s *azureBlobScaler) Close(context.Context) error {
func (s *azureBlobScaler) GetMetricSpecForScaling(context.Context) []v2.MetricSpec {
externalMetric := &v2.ExternalMetricSource{
Metric: v2.MetricIdentifier{
Name: GenerateMetricNameWithIndex(s.metadata.ScalerIndex, s.metadata.MetricName),
Name: GenerateMetricNameWithIndex(s.metadata.ScalerIndex, kedautil.NormalizeString(fmt.Sprintf("azure-blob-%s", s.metadata.BlobContainerName))),
},
Target: GetMetricTarget(s.metricType, s.metadata.TargetBlobCount),
}
Expand Down
5 changes: 1 addition & 4 deletions pkg/scalers/azure_blob_scaler_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,6 @@ var testAzBlobMetadata = []parseAzBlobMetadataTestData{
{map[string]string{}, true, testAzBlobResolvedEnv, map[string]string{}, ""},
// properly formed
{map[string]string{"connectionFromEnv": "CONNECTION", "blobContainerName": "sample", "blobCount": "5", "blobDelimiter": "/", "blobPrefix": "blobsubpath"}, false, testAzBlobResolvedEnv, map[string]string{}, ""},
// properly formed with metricName
{map[string]string{"connectionFromEnv": "CONNECTION", "blobContainerName": "sample", "blobCount": "5", "blobDelimiter": "/", "blobPrefix": "blobsubpath", "metricName": "customname"}, false, testAzBlobResolvedEnv, map[string]string{}, ""},
// Empty blobcontainerName
{map[string]string{"connectionFromEnv": "CONNECTION", "blobContainerName": ""}, true, testAzBlobResolvedEnv, map[string]string{}, ""},
// improperly formed blobCount
Expand Down Expand Up @@ -105,8 +103,7 @@ var testAzBlobMetadata = []parseAzBlobMetadataTestData{

var azBlobMetricIdentifiers = []azBlobMetricIdentifier{
{&testAzBlobMetadata[1], 0, "s0-azure-blob-sample"},
{&testAzBlobMetadata[2], 1, "s1-azure-blob-customname"},
{&testAzBlobMetadata[6], 2, "s2-azure-blob-sample_container"},
{&testAzBlobMetadata[5], 1, "s1-azure-blob-sample_container"},
}

func TestAzBlobParseMetadata(t *testing.T) {
Expand Down
17 changes: 3 additions & 14 deletions pkg/scalers/azure_log_analytics_scaler.go
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,6 @@ type azureLogAnalyticsMetadata struct {
query string
threshold float64
activationThreshold float64
metricName string // Custom metric name for trigger
scalerIndex int
logAnalyticsResourceURL string
activeDirectoryEndpoint string
Expand Down Expand Up @@ -180,7 +179,7 @@ func parseAzureLogAnalyticsMetadata(config *ScalerConfig) (*azureLogAnalyticsMet
}
meta.query = query

// Getting threshold, observe that we dont check AuthParams for threshold
// Getting threshold, observe that we don't check AuthParams for threshold
val, err := getParameterFromConfig(config, "threshold", false)
if err != nil {
return nil, err
Expand All @@ -201,16 +200,6 @@ func parseAzureLogAnalyticsMetadata(config *ScalerConfig) (*azureLogAnalyticsMet
}
meta.activationThreshold = activationThreshold
}

// Resolve metricName

// FIXME: DEPRECATED to be removed in v2.12
if val, ok := config.TriggerMetadata["metricName"]; ok {
meta.metricName = kedautil.NormalizeString(fmt.Sprintf("%s-%s", "azure-log-analytics", val))
} else {
meta.metricName = kedautil.NormalizeString(fmt.Sprintf("%s-%s", "azure-log-analytics", meta.workspaceID))
}

meta.scalerIndex = config.ScalerIndex

meta.logAnalyticsResourceURL = defaultLogAnalyticsResourceURL
Expand All @@ -234,7 +223,7 @@ func parseAzureLogAnalyticsMetadata(config *ScalerConfig) (*azureLogAnalyticsMet
}
meta.activeDirectoryEndpoint = activeDirectoryEndpoint

// Getting unsafeSsl, observe that we dont check AuthParams for unsafeSsl
// Getting unsafeSsl, observe that we don't check AuthParams for unsafeSsl
meta.unsafeSsl = false
unsafeSslVal, err := getParameterFromConfig(config, "unsafeSsl", false)
if err == nil {
Expand Down Expand Up @@ -264,7 +253,7 @@ func getParameterFromConfig(config *ScalerConfig, parameter string, checkAuthPar
func (s *azureLogAnalyticsScaler) GetMetricSpecForScaling(context.Context) []v2.MetricSpec {
externalMetric := &v2.ExternalMetricSource{
Metric: v2.MetricIdentifier{
Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName),
Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s", "azure-log-analytics", s.metadata.workspaceID))),
},
Target: GetMetricTargetMili(s.metricType, s.metadata.threshold),
}
Expand Down
27 changes: 0 additions & 27 deletions pkg/scalers/azure_log_analytics_scaler_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -213,33 +213,6 @@ func TestLogAnalyticsGetMetricSpecForScaling(t *testing.T) {
}
}

type parseMetadataMetricNameTestData struct {
metadata map[string]string
scalerIndex int
metricName string
}

var testParseMetadataMetricName = []parseMetadataMetricNameTestData{
// WorkspaceID
{map[string]string{"tenantIdFromEnv": "d248da64-0e1e-4f79-b8c6-72ab7aa055eb", "clientIdFromEnv": "41826dd4-9e0a-4357-a5bd-a88ad771ea7d", "clientSecretFromEnv": "U6DtAX5r6RPZxd~l12Ri3X8J9urt5Q-xs", "workspaceIdFromEnv": "074dd9f8-c368-4220-9400-acb6e80fc325", "query": query, "threshold": "1900000000"}, 0, "azure-log-analytics-074dd9f8-c368-4220-9400-acb6e80fc325"},
// Custom Name
{map[string]string{"metricName": "testName", "tenantIdFromEnv": "d248da64-0e1e-4f79-b8c6-72ab7aa055eb", "clientIdFromEnv": "41826dd4-9e0a-4357-a5bd-a88ad771ea7d", "clientSecretFromEnv": "U6DtAX5r6RPZxd~l12Ri3X8J9urt5Q-xs", "workspaceIdFromEnv": "074dd9f8-c368-4220-9400-acb6e80fc325", "query": query, "threshold": "1900000000"}, 1, "azure-log-analytics-testName"},
}

func TestLogAnalyticsParseMetadataMetricName(t *testing.T) {
for _, testData := range testParseMetadataMetricName {
meta, err := parseAzureLogAnalyticsMetadata(&ScalerConfig{ResolvedEnv: sampleLogAnalyticsResolvedEnv,
TriggerMetadata: testData.metadata, AuthParams: nil,
PodIdentity: kedav1alpha1.AuthPodIdentity{}, ScalerIndex: testData.scalerIndex})
if err != nil {
t.Error("Expected success but got error", err)
}
if meta.metricName != testData.metricName {
t.Errorf("Expected %s but got %s", testData.metricName, meta.metricName)
}
}
}

type parseLogAnalyticsMetadataTestUnsafeSsl struct {
metadata map[string]string
unsafeSsl bool
Expand Down
11 changes: 1 addition & 10 deletions pkg/scalers/cassandra_scaler.go
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,6 @@ type CassandraMetadata struct {
query string
targetQueryValue int64
activationTargetQueryValue int64
metricName string
scalerIndex int
}

Expand Down Expand Up @@ -147,14 +146,6 @@ func parseCassandraMetadata(config *ScalerConfig) (*CassandraMetadata, error) {
} else {
return nil, fmt.Errorf("no keyspace given")
}

// FIXME: DEPRECATED to be removed in v2.12
if val, ok := config.TriggerMetadata["metricName"]; ok {
meta.metricName = kedautil.NormalizeString(fmt.Sprintf("cassandra-%s", val))
} else {
meta.metricName = kedautil.NormalizeString(fmt.Sprintf("cassandra-%s", meta.keyspace))
}

if val, ok := config.AuthParams["password"]; ok {
meta.password = val
} else {
Expand Down Expand Up @@ -189,7 +180,7 @@ func newCassandraSession(meta *CassandraMetadata, logger logr.Logger) (*gocql.Se
func (s *cassandraScaler) GetMetricSpecForScaling(context.Context) []v2.MetricSpec {
externalMetric := &v2.ExternalMetricSource{
Metric: v2.MetricIdentifier{
Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName),
Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("cassandra-%s", s.metadata.keyspace))),
},
Target: GetMetricTarget(s.metricType, s.metadata.targetQueryValue),
}
Expand Down
22 changes: 11 additions & 11 deletions pkg/scalers/cassandra_scaler_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,29 +24,29 @@ var testCassandraMetadata = []parseCassandraMetadataTestData{
// nothing passed
{map[string]string{}, true, map[string]string{}},
// everything is passed in verbatim
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "port": "9042", "clusterIPAddress": "cassandra.test", "keyspace": "test_keyspace", "ScalerIndex": "0", "metricName": "myMetric"}, false, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
// no metricName passed, metricName is generated from keyspace
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "port": "9042", "clusterIPAddress": "cassandra.test", "keyspace": "test_keyspace", "ScalerIndex": "0"}, false, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
// metricName is generated from keyspace
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "clusterIPAddress": "cassandra.test:9042", "keyspace": "test_keyspace", "ScalerIndex": "0"}, false, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
// no query passed
{map[string]string{"targetQueryValue": "1", "username": "cassandra", "clusterIPAddress": "cassandra.test:9042", "keyspace": "test_keyspace", "ScalerIndex": "0", "metricName": "myMetric"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
{map[string]string{"targetQueryValue": "1", "username": "cassandra", "clusterIPAddress": "cassandra.test:9042", "keyspace": "test_keyspace", "ScalerIndex": "0"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
// no targetQueryValue passed
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "username": "cassandra", "clusterIPAddress": "cassandra.test:9042", "keyspace": "test_keyspace", "ScalerIndex": "0", "metricName": "myMetric"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "username": "cassandra", "clusterIPAddress": "cassandra.test:9042", "keyspace": "test_keyspace", "ScalerIndex": "0"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
// no username passed
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "clusterIPAddress": "cassandra.test:9042", "keyspace": "test_keyspace", "ScalerIndex": "0", "metricName": "myMetric"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "clusterIPAddress": "cassandra.test:9042", "keyspace": "test_keyspace", "ScalerIndex": "0"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
// no port passed
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "clusterIPAddress": "cassandra.test", "keyspace": "test_keyspace", "ScalerIndex": "0", "metricName": "myMetric"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "clusterIPAddress": "cassandra.test", "keyspace": "test_keyspace", "ScalerIndex": "0"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
// no clusterIPAddress passed
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "port": "9042", "keyspace": "test_keyspace", "ScalerIndex": "0", "metricName": "myMetric"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "port": "9042", "keyspace": "test_keyspace", "ScalerIndex": "0"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
// no keyspace passed
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "clusterIPAddress": "cassandra.test:9042", "ScalerIndex": "0", "metricName": "myMetric"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "clusterIPAddress": "cassandra.test:9042", "ScalerIndex": "0"}, true, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
// no password passed
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "clusterIPAddress": "cassandra.test:9042", "keyspace": "test_keyspace", "ScalerIndex": "0", "metricName": "myMetric"}, true, map[string]string{}},
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "clusterIPAddress": "cassandra.test:9042", "keyspace": "test_keyspace", "ScalerIndex": "0"}, true, map[string]string{}},
// fix issue[4110] passed
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "port": "9042", "clusterIPAddress": "https://cassandra.test", "keyspace": "test_keyspace", "ScalerIndex": "0", "metricName": "myMetric"}, false, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
{map[string]string{"query": "SELECT COUNT(*) FROM test_keyspace.test_table;", "targetQueryValue": "1", "username": "cassandra", "port": "9042", "clusterIPAddress": "https://cassandra.test", "keyspace": "test_keyspace", "ScalerIndex": "0"}, false, map[string]string{"password": "Y2Fzc2FuZHJhCg=="}},
}

var cassandraMetricIdentifiers = []cassandraMetricIdentifier{
{&testCassandraMetadata[1], 0, "s0-cassandra-myMetric"},
{&testCassandraMetadata[1], 0, "s0-cassandra-test_keyspace"},
{&testCassandraMetadata[2], 1, "s1-cassandra-test_keyspace"},
}

Expand Down
Loading

0 comments on commit 1352c76

Please sign in to comment.