From 4f8570a6aa06881c93438ddf0184fe1143fbfa95 Mon Sep 17 00:00:00 2001 From: Jorge Turrado Date: Sun, 10 Oct 2021 13:36:03 +0200 Subject: [PATCH 01/11] Update artemis, azure and aws scalers to handle scalerIndex Signed-off-by: Jorge Turrado --- pkg/scalers/artemis_scaler.go | 6 +- pkg/scalers/artemis_scaler_test.go | 6 +- pkg/scalers/aws_cloudwatch_scaler.go | 6 +- pkg/scalers/aws_cloudwatch_test.go | 6 +- pkg/scalers/aws_kinesis_stream_scaler.go | 5 +- pkg/scalers/aws_kinesis_stream_test.go | 91 ++++++++++++------- pkg/scalers/aws_sqs_queue_scaler.go | 5 +- pkg/scalers/aws_sqs_queue_test.go | 6 +- pkg/scalers/azure_blob_scaler.go | 5 +- pkg/scalers/azure_blob_scaler_test.go | 9 +- pkg/scalers/azure_eventhub_scaler.go | 5 +- pkg/scalers/azure_eventhub_scaler_test.go | 6 +- pkg/scalers/azure_log_analytics_scaler.go | 5 +- .../azure_log_analytics_scaler_test.go | 17 ++-- pkg/scalers/azure_monitor_scaler.go | 5 +- pkg/scalers/azure_monitor_scaler_test.go | 6 +- pkg/scalers/azure_pipelines_scaler.go | 5 +- pkg/scalers/azure_pipelines_scaler_test.go | 6 +- pkg/scalers/azure_queue_scaler.go | 5 +- pkg/scalers/azure_queue_scaler_test.go | 7 +- pkg/scalers/azure_servicebus_scaler.go | 5 +- pkg/scalers/azure_servicebus_scaler_test.go | 9 +- pkg/scalers/rabbitmq_scaler.go | 27 +++--- pkg/scalers/rabbitmq_scaler_test.go | 9 +- pkg/scalers/scaler.go | 8 ++ pkg/scaling/scale_handler.go | 5 +- 26 files changed, 183 insertions(+), 92 deletions(-) diff --git a/pkg/scalers/artemis_scaler.go b/pkg/scalers/artemis_scaler.go index f52b500bd66..a9c1fc7c16e 100644 --- a/pkg/scalers/artemis_scaler.go +++ b/pkg/scalers/artemis_scaler.go @@ -36,6 +36,7 @@ type artemisMetadata struct { restAPITemplate string queueLength int corsHeader string + scalerIndex int } //revive:enable:var-naming @@ -153,6 +154,9 @@ func parseArtemisMetadata(config *ScalerConfig) (*artemisMetadata, error) { if meta.password == "" { return nil, fmt.Errorf("password cannot be empty") } + + meta.scalerIndex = 
config.ScalerIndex + return &meta, nil } @@ -250,7 +254,7 @@ func (s *artemisScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(int64(s.metadata.queueLength), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "artemis", s.metadata.brokerName, s.metadata.queueName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "artemis", s.metadata.brokerName, s.metadata.queueName))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/artemis_scaler_test.go b/pkg/scalers/artemis_scaler_test.go index cba1022fd15..7f127ddd6e2 100644 --- a/pkg/scalers/artemis_scaler_test.go +++ b/pkg/scalers/artemis_scaler_test.go @@ -17,6 +17,7 @@ type parseArtemisMetadataTestData struct { type artemisMetricIdentifier struct { metadataTestData *parseArtemisMetadataTestData + scalerIndex int name string } @@ -59,7 +60,8 @@ var testArtemisMetadata = []parseArtemisMetadataTestData{ } var artemisMetricIdentifiers = []artemisMetricIdentifier{ - {&testArtemisMetadata[7], "artemis-broker-activemq-queue1"}, + {&testArtemisMetadata[7], 0, "s0-artemis-broker-activemq-queue1"}, + {&testArtemisMetadata[7], 1, "s1-artemis-broker-activemq-queue1"}, } var testArtemisMetadataWithEmptyAuthParams = []parseArtemisMetadataTestData{ @@ -141,7 +143,7 @@ func TestArtemisParseMetadata(t *testing.T) { func TestArtemisGetMetricSpecForScaling(t *testing.T) { for _, testData := range artemisMetricIdentifiers { - meta, err := parseArtemisMetadata(&ScalerConfig{ResolvedEnv: sampleArtemisResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: nil}) + meta, err := parseArtemisMetadata(&ScalerConfig{ResolvedEnv: sampleArtemisResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: nil, ScalerIndex: testData.scalerIndex}) if err 
!= nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/aws_cloudwatch_scaler.go b/pkg/scalers/aws_cloudwatch_scaler.go index e83aaae0f0a..3bba2e2ae9f 100644 --- a/pkg/scalers/aws_cloudwatch_scaler.go +++ b/pkg/scalers/aws_cloudwatch_scaler.go @@ -49,6 +49,8 @@ type awsCloudwatchMetadata struct { awsRegion string awsAuthorization awsAuthorizationMetadata + + scalerIndex int } var cloudwatchLog = logf.Log.WithName("aws_cloudwatch_scaler") @@ -189,6 +191,8 @@ func parseAwsCloudwatchMetadata(config *ScalerConfig) (*awsCloudwatchMetadata, e meta.awsAuthorization = auth + meta.scalerIndex = config.ScalerIndex + return meta, nil } @@ -213,7 +217,7 @@ func (c *awsCloudwatchScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(int64(c.metadata.targetMetricValue), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", "aws-cloudwatch", c.metadata.namespace, c.metadata.dimensionName[0], c.metadata.dimensionValue[0])), + Name: GenerateMetricNameWithIndex(c.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", "aws-cloudwatch", c.metadata.namespace, c.metadata.dimensionName[0], c.metadata.dimensionValue[0]))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/aws_cloudwatch_test.go b/pkg/scalers/aws_cloudwatch_test.go index 5b37a8fd5c4..d39e57977ff 100644 --- a/pkg/scalers/aws_cloudwatch_test.go +++ b/pkg/scalers/aws_cloudwatch_test.go @@ -28,6 +28,7 @@ type parseAWSCloudwatchMetadataTestData struct { type awsCloudwatchMetricIdentifier struct { metadataTestData *parseAWSCloudwatchMetadataTestData + scalerIndex int name string } @@ -233,7 +234,8 @@ var testAWSCloudwatchMetadata = []parseAWSCloudwatchMetadataTestData{ } var awsCloudwatchMetricIdentifiers = []awsCloudwatchMetricIdentifier{ - {&testAWSCloudwatchMetadata[1], 
"aws-cloudwatch-AWS-SQS-QueueName-keda"}, + {&testAWSCloudwatchMetadata[1], 0, "s0-aws-cloudwatch-AWS-SQS-QueueName-keda"}, + {&testAWSCloudwatchMetadata[1], 3, "s3-aws-cloudwatch-AWS-SQS-QueueName-keda"}, } func TestCloudwatchParseMetadata(t *testing.T) { @@ -250,7 +252,7 @@ func TestCloudwatchParseMetadata(t *testing.T) { func TestAWSCloudwatchGetMetricSpecForScaling(t *testing.T) { for _, testData := range awsCloudwatchMetricIdentifiers { - meta, err := parseAwsCloudwatchMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testAWSCloudwatchResolvedEnv, AuthParams: testData.metadataTestData.authParams}) + meta, err := parseAwsCloudwatchMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testAWSCloudwatchResolvedEnv, AuthParams: testData.metadataTestData.authParams, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/aws_kinesis_stream_scaler.go b/pkg/scalers/aws_kinesis_stream_scaler.go index c7c57e71807..4bd3482c40a 100644 --- a/pkg/scalers/aws_kinesis_stream_scaler.go +++ b/pkg/scalers/aws_kinesis_stream_scaler.go @@ -34,6 +34,7 @@ type awsKinesisStreamMetadata struct { streamName string awsRegion string awsAuthorization awsAuthorizationMetadata + scalerIndex int } var kinesisStreamLog = logf.Log.WithName("aws_kinesis_stream_scaler") @@ -83,6 +84,8 @@ func parseAwsKinesisStreamMetadata(config *ScalerConfig) (*awsKinesisStreamMetad meta.awsAuthorization = auth + meta.scalerIndex = config.ScalerIndex + return &meta, nil } @@ -105,7 +108,7 @@ func (s *awsKinesisStreamScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec targetShardCountQty := resource.NewQuantity(int64(s.metadata.targetShardCount), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s", "AWS-Kinesis-Stream", s.metadata.streamName)), + Name: 
GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s", "AWS-Kinesis-Stream", s.metadata.streamName))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/aws_kinesis_stream_test.go b/pkg/scalers/aws_kinesis_stream_test.go index b4fe05c3d5e..232d0d2755f 100644 --- a/pkg/scalers/aws_kinesis_stream_test.go +++ b/pkg/scalers/aws_kinesis_stream_test.go @@ -19,15 +19,17 @@ var testAWSKinesisAuthentication = map[string]string{ } type parseAWSKinesisMetadataTestData struct { - metadata map[string]string - expected *awsKinesisStreamMetadata - authParams map[string]string - isError bool - comment string + metadata map[string]string + expected *awsKinesisStreamMetadata + authParams map[string]string + isError bool + comment string + scalerIndex int } type awsKinesisMetricIdentifier struct { metadataTestData *parseAWSKinesisMetadataTestData + scalerIndex int name string } @@ -53,27 +55,34 @@ var testAWSKinesisMetadata = []parseAWSKinesisMetadataTestData{ awsSecretAccessKey: testAWSKinesisSecretAccessKey, podIdentityOwner: true, }, + scalerIndex: 0, }, - isError: false, - comment: "properly formed stream name and region"}, + isError: false, + comment: "properly formed stream name and region", + scalerIndex: 0, + }, { metadata: map[string]string{ "streamName": "", "shardCount": "2", "awsRegion": testAWSRegion}, - authParams: testAWSKinesisAuthentication, - expected: &awsKinesisStreamMetadata{}, - isError: true, - comment: "missing stream name"}, + authParams: testAWSKinesisAuthentication, + expected: &awsKinesisStreamMetadata{}, + isError: true, + comment: "missing stream name", + scalerIndex: 1, + }, { metadata: map[string]string{ "streamName": testAWSKinesisStreamName, "shardCount": "2", "awsRegion": ""}, - authParams: testAWSKinesisAuthentication, - expected: &awsKinesisStreamMetadata{}, - isError: true, - comment: "properly formed stream name, empty region"}, + authParams: 
testAWSKinesisAuthentication, + expected: &awsKinesisStreamMetadata{}, + isError: true, + comment: "properly formed stream name, empty region", + scalerIndex: 2, + }, { metadata: map[string]string{ "streamName": testAWSKinesisStreamName, @@ -89,9 +98,12 @@ var testAWSKinesisMetadata = []parseAWSKinesisMetadataTestData{ awsSecretAccessKey: testAWSKinesisSecretAccessKey, podIdentityOwner: true, }, + scalerIndex: 3, }, - isError: false, - comment: "properly formed stream name and region, empty shard count"}, + isError: false, + comment: "properly formed stream name and region, empty shard count", + scalerIndex: 3, + }, { metadata: map[string]string{ "streamName": testAWSKinesisStreamName, @@ -107,10 +119,12 @@ var testAWSKinesisMetadata = []parseAWSKinesisMetadataTestData{ awsSecretAccessKey: testAWSKinesisSecretAccessKey, podIdentityOwner: true, }, + scalerIndex: 4, }, - isError: false, - comment: "properly formed stream name and region, wrong shard count"}, - + isError: false, + comment: "properly formed stream name and region, wrong shard count", + scalerIndex: 4, + }, { metadata: map[string]string{ "streamName": testAWSKinesisStreamName, @@ -120,9 +134,11 @@ var testAWSKinesisMetadata = []parseAWSKinesisMetadataTestData{ "awsAccessKeyID": "", "awsSecretAccessKey": testAWSKinesisSecretAccessKey, }, - expected: &awsKinesisStreamMetadata{}, - isError: true, - comment: "with AWS Credentials from TriggerAuthentication, missing Access Key Id"}, + expected: &awsKinesisStreamMetadata{}, + isError: true, + comment: "with AWS Credentials from TriggerAuthentication, missing Access Key Id", + scalerIndex: 5, + }, {metadata: map[string]string{ "streamName": testAWSKinesisStreamName, "shardCount": "2", @@ -131,9 +147,11 @@ var testAWSKinesisMetadata = []parseAWSKinesisMetadataTestData{ "awsAccessKeyID": testAWSKinesisAccessKeyID, "awsSecretAccessKey": "", }, - expected: &awsKinesisStreamMetadata{}, - isError: true, - comment: "with AWS Credentials from TriggerAuthentication, 
missing Secret Access Key"}, + expected: &awsKinesisStreamMetadata{}, + isError: true, + comment: "with AWS Credentials from TriggerAuthentication, missing Secret Access Key", + scalerIndex: 6, + }, {metadata: map[string]string{ "streamName": testAWSKinesisStreamName, "shardCount": "2", @@ -149,9 +167,12 @@ var testAWSKinesisMetadata = []parseAWSKinesisMetadataTestData{ awsRoleArn: testAWSKinesisRoleArn, podIdentityOwner: true, }, + scalerIndex: 7, }, - isError: false, - comment: "with AWS Role from TriggerAuthentication"}, + isError: false, + comment: "with AWS Role from TriggerAuthentication", + scalerIndex: 7, + }, {metadata: map[string]string{ "streamName": testAWSKinesisStreamName, "shardCount": "2", @@ -165,18 +186,22 @@ var testAWSKinesisMetadata = []parseAWSKinesisMetadataTestData{ awsAuthorization: awsAuthorizationMetadata{ podIdentityOwner: false, }, + scalerIndex: 8, }, - isError: false, - comment: "with AWS Role assigned on KEDA operator itself"}, + isError: false, + comment: "with AWS Role assigned on KEDA operator itself", + scalerIndex: 8, + }, } var awsKinesisMetricIdentifiers = []awsKinesisMetricIdentifier{ - {&testAWSKinesisMetadata[1], "AWS-Kinesis-Stream-test"}, + {&testAWSKinesisMetadata[1], 0, "s0-AWS-Kinesis-Stream-test"}, + {&testAWSKinesisMetadata[1], 1, "s1-AWS-Kinesis-Stream-test"}, } func TestKinesisParseMetadata(t *testing.T) { for _, testData := range testAWSKinesisMetadata { - result, err := parseAwsKinesisStreamMetadata(&ScalerConfig{TriggerMetadata: testData.metadata, ResolvedEnv: testAWSKinesisAuthentication, AuthParams: testData.authParams}) + result, err := parseAwsKinesisStreamMetadata(&ScalerConfig{TriggerMetadata: testData.metadata, ResolvedEnv: testAWSKinesisAuthentication, AuthParams: testData.authParams, ScalerIndex: testData.scalerIndex}) if err != nil && !testData.isError { t.Errorf("Expected success because %s got error, %s", testData.comment, err) } @@ -192,7 +217,7 @@ func TestKinesisParseMetadata(t *testing.T) { func 
TestAWSKinesisGetMetricSpecForScaling(t *testing.T) { for _, testData := range awsKinesisMetricIdentifiers { - meta, err := parseAwsKinesisStreamMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testAWSKinesisAuthentication, AuthParams: testData.metadataTestData.authParams}) + meta, err := parseAwsKinesisStreamMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testAWSKinesisAuthentication, AuthParams: testData.metadataTestData.authParams, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/aws_sqs_queue_scaler.go b/pkg/scalers/aws_sqs_queue_scaler.go index c4ea51ce41c..2f4bc31d129 100644 --- a/pkg/scalers/aws_sqs_queue_scaler.go +++ b/pkg/scalers/aws_sqs_queue_scaler.go @@ -45,6 +45,7 @@ type awsSqsQueueMetadata struct { queueName string awsRegion string awsAuthorization awsAuthorizationMetadata + scalerIndex int } // NewAwsSqsQueueScaler creates a new awsSqsQueueScaler @@ -105,6 +106,8 @@ func parseAwsSqsQueueMetadata(config *ScalerConfig) (*awsSqsQueueMetadata, error meta.awsAuthorization = auth + meta.scalerIndex = config.ScalerIndex + return &meta, nil } @@ -127,7 +130,7 @@ func (s *awsSqsQueueScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetQueueLengthQty := resource.NewQuantity(int64(s.metadata.targetQueueLength), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s", "AWS-SQS-Queue", s.metadata.queueName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s", "AWS-SQS-Queue", s.metadata.queueName))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/aws_sqs_queue_test.go b/pkg/scalers/aws_sqs_queue_test.go index c4ef1bcdcd5..1fd34870fda 100644 --- a/pkg/scalers/aws_sqs_queue_test.go +++ 
b/pkg/scalers/aws_sqs_queue_test.go @@ -28,6 +28,7 @@ type parseAWSSQSMetadataTestData struct { type awsSQSMetricIdentifier struct { metadataTestData *parseAWSSQSMetadataTestData + scalerIndex int name string } @@ -131,7 +132,8 @@ var testAWSSQSMetadata = []parseAWSSQSMetadataTestData{ } var awsSQSMetricIdentifiers = []awsSQSMetricIdentifier{ - {&testAWSSQSMetadata[1], "AWS-SQS-Queue-DeleteArtifactQ"}, + {&testAWSSQSMetadata[1], 0, "s0-AWS-SQS-Queue-DeleteArtifactQ"}, + {&testAWSSQSMetadata[1], 1, "s1-AWS-SQS-Queue-DeleteArtifactQ"}, } func TestSQSParseMetadata(t *testing.T) { @@ -148,7 +150,7 @@ func TestSQSParseMetadata(t *testing.T) { func TestAWSSQSGetMetricSpecForScaling(t *testing.T) { for _, testData := range awsSQSMetricIdentifiers { - meta, err := parseAwsSqsQueueMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testAWSSQSAuthentication, AuthParams: testData.metadataTestData.authParams}) + meta, err := parseAwsSqsQueueMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testAWSSQSAuthentication, AuthParams: testData.metadataTestData.authParams, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/azure_blob_scaler.go b/pkg/scalers/azure_blob_scaler.go index f8b0dfb23ae..764cb0c7b67 100644 --- a/pkg/scalers/azure_blob_scaler.go +++ b/pkg/scalers/azure_blob_scaler.go @@ -56,6 +56,7 @@ type azureBlobMetadata struct { accountName string metricName string endpointSuffix string + scalerIndex int } var azureBlobLog = logf.Log.WithName("azure_blob_scaler") @@ -152,6 +153,8 @@ func parseAzureBlobMetadata(config *ScalerConfig) (*azureBlobMetadata, kedav1alp return nil, "", fmt.Errorf("pod identity %s not supported for azure storage blobs", config.PodIdentity) } + meta.scalerIndex = config.ScalerIndex + return &meta, config.PodIdentity, nil } @@ -185,7 +188,7 @@ func (s *azureBlobScaler) GetMetricSpecForScaling() 
[]v2beta2.MetricSpec { targetBlobCount := resource.NewQuantity(int64(s.metadata.targetBlobCount), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: s.metadata.metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/azure_blob_scaler_test.go b/pkg/scalers/azure_blob_scaler_test.go index a3585f073f4..2b4a96613c2 100644 --- a/pkg/scalers/azure_blob_scaler_test.go +++ b/pkg/scalers/azure_blob_scaler_test.go @@ -37,6 +37,7 @@ type parseAzBlobMetadataTestData struct { type azBlobMetricIdentifier struct { metadataTestData *parseAzBlobMetadataTestData + scalerIndex int name string } @@ -72,9 +73,9 @@ var testAzBlobMetadata = []parseAzBlobMetadataTestData{ } var azBlobMetricIdentifiers = []azBlobMetricIdentifier{ - {&testAzBlobMetadata[1], "azure-blob-sample-blobsubpath-"}, - {&testAzBlobMetadata[2], "azure-blob-customname"}, - {&testAzBlobMetadata[5], "azure-blob-sample_container"}, + {&testAzBlobMetadata[1], 0, "s0-azure-blob-sample-blobsubpath-"}, + {&testAzBlobMetadata[2], 1, "s1-azure-blob-customname"}, + {&testAzBlobMetadata[5], 2, "s2-azure-blob-sample_container"}, } func TestAzBlobParseMetadata(t *testing.T) { @@ -94,7 +95,7 @@ func TestAzBlobParseMetadata(t *testing.T) { func TestAzBlobGetMetricSpecForScaling(t *testing.T) { for _, testData := range azBlobMetricIdentifiers { - meta, podIdentity, err := parseAzureBlobMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams, PodIdentity: testData.metadataTestData.podIdentity}) + meta, podIdentity, err := parseAzureBlobMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams, PodIdentity: 
testData.metadataTestData.podIdentity, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/azure_eventhub_scaler.go b/pkg/scalers/azure_eventhub_scaler.go index c281477c1fb..90633b7071a 100644 --- a/pkg/scalers/azure_eventhub_scaler.go +++ b/pkg/scalers/azure_eventhub_scaler.go @@ -58,6 +58,7 @@ type azureEventHubScaler struct { type eventHubMetadata struct { eventHubInfo azure.EventHubInfo threshold int64 + scalerIndex int } // NewAzureEventHubScaler creates a new scaler for eventHub @@ -152,6 +153,8 @@ func parseAzureEventHubMetadata(config *ScalerConfig) (*eventHubMetadata, error) } } + meta.scalerIndex = config.ScalerIndex + return &meta, nil } @@ -252,7 +255,7 @@ func (scaler *azureEventHubScaler) GetMetricSpecForScaling() []v2beta2.MetricSpe targetMetricVal := resource.NewQuantity(scaler.metadata.threshold, resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "azure-eventhub", scaler.metadata.eventHubInfo.EventHubConnection, scaler.metadata.eventHubInfo.EventHubConsumerGroup)), + Name: GenerateMetricNameWithIndex(scaler.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "azure-eventhub", scaler.metadata.eventHubInfo.EventHubConnection, scaler.metadata.eventHubInfo.EventHubConsumerGroup))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/azure_eventhub_scaler_test.go b/pkg/scalers/azure_eventhub_scaler_test.go index 0aa55d90bbd..a7e0748007e 100644 --- a/pkg/scalers/azure_eventhub_scaler_test.go +++ b/pkg/scalers/azure_eventhub_scaler_test.go @@ -31,6 +31,7 @@ type parseEventHubMetadataTestData struct { type eventHubMetricIdentifier struct { metadataTestData *parseEventHubMetadataTestData + scalerIndex int name string } @@ -65,7 +66,8 @@ var parseEventHubMetadataDatasetWithPodIdentity = []parseEventHubMetadataTestDat } 
var eventHubMetricIdentifiers = []eventHubMetricIdentifier{ - {&parseEventHubMetadataDataset[1], "azure-eventhub-none-testEventHubConsumerGroup"}, + {&parseEventHubMetadataDataset[1], 0, "s0-azure-eventhub-none-testEventHubConsumerGroup"}, + {&parseEventHubMetadataDataset[1], 1, "s1-azure-eventhub-none-testEventHubConsumerGroup"}, } var testEventHubScaler = azureEventHubScaler{ @@ -435,7 +437,7 @@ func DeleteContainerInStorage(ctx context.Context, endpoint *url.URL, credential func TestEventHubGetMetricSpecForScaling(t *testing.T) { for _, testData := range eventHubMetricIdentifiers { - meta, err := parseAzureEventHubMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: sampleEventHubResolvedEnv, AuthParams: map[string]string{}}) + meta, err := parseAzureEventHubMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: sampleEventHubResolvedEnv, AuthParams: map[string]string{}, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/azure_log_analytics_scaler.go b/pkg/scalers/azure_log_analytics_scaler.go index f122006bc90..6d69067be62 100644 --- a/pkg/scalers/azure_log_analytics_scaler.go +++ b/pkg/scalers/azure_log_analytics_scaler.go @@ -65,6 +65,7 @@ type azureLogAnalyticsMetadata struct { query string threshold int64 metricName string // Custom metric name for trigger + scalerIndex int } type sessionCache struct { @@ -185,6 +186,8 @@ func parseAzureLogAnalyticsMetadata(config *ScalerConfig) (*azureLogAnalyticsMet meta.metricName = kedautil.NormalizeString(fmt.Sprintf("%s-%s", "azure-log-analytics", meta.workspaceID)) } + meta.scalerIndex = config.ScalerIndex + return &meta, nil } @@ -222,7 +225,7 @@ func (s *azureLogAnalyticsScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: s.metadata.metricName, + Name: 
GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/azure_log_analytics_scaler_test.go b/pkg/scalers/azure_log_analytics_scaler_test.go index 864b9de19cc..bb93d7af4da 100644 --- a/pkg/scalers/azure_log_analytics_scaler_test.go +++ b/pkg/scalers/azure_log_analytics_scaler_test.go @@ -37,6 +37,7 @@ type parseLogAnalyticsMetadataTestData struct { type LogAnalyticsMetricIdentifier struct { metadataTestData *parseLogAnalyticsMetadataTestData + scalerIndex int name string } @@ -90,7 +91,8 @@ var testLogAnalyticsMetadata = []parseLogAnalyticsMetadataTestData{ } var LogAnalyticsMetricIdentifiers = []LogAnalyticsMetricIdentifier{ - {&testLogAnalyticsMetadata[7], "azure-log-analytics-074dd9f8-c368-4220-9400-acb6e80fc325"}, + {&testLogAnalyticsMetadata[7], 0, "s0-azure-log-analytics-074dd9f8-c368-4220-9400-acb6e80fc325"}, + {&testLogAnalyticsMetadata[7], 1, "s1-azure-log-analytics-074dd9f8-c368-4220-9400-acb6e80fc325"}, } var testLogAnalyticsMetadataWithEmptyAuthParams = []parseLogAnalyticsMetadataTestData{ @@ -159,7 +161,7 @@ func TestLogAnalyticsParseMetadata(t *testing.T) { func TestLogAnalyticsGetMetricSpecForScaling(t *testing.T) { for _, testData := range LogAnalyticsMetricIdentifiers { - meta, err := parseAzureLogAnalyticsMetadata(&ScalerConfig{ResolvedEnv: sampleLogAnalyticsResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: nil, PodIdentity: ""}) + meta, err := parseAzureLogAnalyticsMetadata(&ScalerConfig{ResolvedEnv: sampleLogAnalyticsResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: nil, PodIdentity: "", ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } @@ -181,20 +183,21 @@ func TestLogAnalyticsGetMetricSpecForScaling(t *testing.T) { } type parseMetadataMetricNameTestData struct { - metadata map[string]string - metricName string + metadata 
map[string]string + scalerIndex int + metricName string } var testParseMetadataMetricName = []parseMetadataMetricNameTestData{ // WorkspaceID - {map[string]string{"tenantIdFromEnv": "d248da64-0e1e-4f79-b8c6-72ab7aa055eb", "clientIdFromEnv": "41826dd4-9e0a-4357-a5bd-a88ad771ea7d", "clientSecretFromEnv": "U6DtAX5r6RPZxd~l12Ri3X8J9urt5Q-xs", "workspaceIdFromEnv": "074dd9f8-c368-4220-9400-acb6e80fc325", "query": query, "threshold": "1900000000"}, "azure-log-analytics-074dd9f8-c368-4220-9400-acb6e80fc325"}, + {map[string]string{"tenantIdFromEnv": "d248da64-0e1e-4f79-b8c6-72ab7aa055eb", "clientIdFromEnv": "41826dd4-9e0a-4357-a5bd-a88ad771ea7d", "clientSecretFromEnv": "U6DtAX5r6RPZxd~l12Ri3X8J9urt5Q-xs", "workspaceIdFromEnv": "074dd9f8-c368-4220-9400-acb6e80fc325", "query": query, "threshold": "1900000000"}, 0, "azure-log-analytics-074dd9f8-c368-4220-9400-acb6e80fc325"}, // Custom Name - {map[string]string{"metricName": "testName", "tenantIdFromEnv": "d248da64-0e1e-4f79-b8c6-72ab7aa055eb", "clientIdFromEnv": "41826dd4-9e0a-4357-a5bd-a88ad771ea7d", "clientSecretFromEnv": "U6DtAX5r6RPZxd~l12Ri3X8J9urt5Q-xs", "workspaceIdFromEnv": "074dd9f8-c368-4220-9400-acb6e80fc325", "query": query, "threshold": "1900000000"}, "azure-log-analytics-testName"}, + {map[string]string{"metricName": "testName", "tenantIdFromEnv": "d248da64-0e1e-4f79-b8c6-72ab7aa055eb", "clientIdFromEnv": "41826dd4-9e0a-4357-a5bd-a88ad771ea7d", "clientSecretFromEnv": "U6DtAX5r6RPZxd~l12Ri3X8J9urt5Q-xs", "workspaceIdFromEnv": "074dd9f8-c368-4220-9400-acb6e80fc325", "query": query, "threshold": "1900000000"}, 1, "azure-log-analytics-testName"}, } func TestLogAnalyticsParseMetadataMetricName(t *testing.T) { for _, testData := range testParseMetadataMetricName { - meta, err := parseAzureLogAnalyticsMetadata(&ScalerConfig{ResolvedEnv: sampleLogAnalyticsResolvedEnv, TriggerMetadata: testData.metadata, AuthParams: nil, PodIdentity: ""}) + meta, err := parseAzureLogAnalyticsMetadata(&ScalerConfig{ResolvedEnv: 
sampleLogAnalyticsResolvedEnv, TriggerMetadata: testData.metadata, AuthParams: nil, PodIdentity: "", ScalerIndex: testData.scalerIndex}) if err != nil { t.Error("Expected success but got error", err) } diff --git a/pkg/scalers/azure_monitor_scaler.go b/pkg/scalers/azure_monitor_scaler.go index 3ec37e06059..2f75c12edcd 100644 --- a/pkg/scalers/azure_monitor_scaler.go +++ b/pkg/scalers/azure_monitor_scaler.go @@ -47,6 +47,7 @@ type azureMonitorScaler struct { type azureMonitorMetadata struct { azureMonitorInfo azure.MonitorInfo targetValue int + scalerIndex int } var azureMonitorLog = logf.Log.WithName("azure_monitor_scaler") @@ -145,6 +146,8 @@ func parseAzureMonitorMetadata(config *ScalerConfig) (*azureMonitorMetadata, err meta.azureMonitorInfo.ClientID = clientID meta.azureMonitorInfo.ClientPassword = clientPassword + meta.scalerIndex = config.ScalerIndex + return &meta, nil } @@ -191,7 +194,7 @@ func (s *azureMonitorScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricVal := resource.NewQuantity(int64(s.metadata.targetValue), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", "azure-monitor", s.metadata.azureMonitorInfo.ResourceURI, s.metadata.azureMonitorInfo.ResourceGroupName, s.metadata.azureMonitorInfo.Name)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", "azure-monitor", s.metadata.azureMonitorInfo.ResourceURI, s.metadata.azureMonitorInfo.ResourceGroupName, s.metadata.azureMonitorInfo.Name))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/azure_monitor_scaler_test.go b/pkg/scalers/azure_monitor_scaler_test.go index 155dbbdc374..8c20e532370 100644 --- a/pkg/scalers/azure_monitor_scaler_test.go +++ b/pkg/scalers/azure_monitor_scaler_test.go @@ -32,6 +32,7 @@ type parseAzMonitorMetadataTestData struct { type 
azMonitorMetricIdentifier struct { metadataTestData *parseAzMonitorMetadataTestData + scalerIndex int name string } @@ -80,7 +81,8 @@ var testParseAzMonitorMetadata = []parseAzMonitorMetadataTestData{ } var azMonitorMetricIdentifiers = []azMonitorMetricIdentifier{ - {&testParseAzMonitorMetadata[1], "azure-monitor-test-resource-uri-test-metric"}, + {&testParseAzMonitorMetadata[1], 0, "s0-azure-monitor-test-resource-uri-test-metric"}, + {&testParseAzMonitorMetadata[1], 1, "s1-azure-monitor-test-resource-uri-test-metric"}, } func TestAzMonitorParseMetadata(t *testing.T) { @@ -97,7 +99,7 @@ func TestAzMonitorParseMetadata(t *testing.T) { func TestAzMonitorGetMetricSpecForScaling(t *testing.T) { for _, testData := range azMonitorMetricIdentifiers { - meta, err := parseAzureMonitorMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams, PodIdentity: testData.metadataTestData.podIdentity}) + meta, err := parseAzureMonitorMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams, PodIdentity: testData.metadataTestData.podIdentity, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/azure_pipelines_scaler.go b/pkg/scalers/azure_pipelines_scaler.go index 70d2ba62e8c..7700c54b1e0 100644 --- a/pkg/scalers/azure_pipelines_scaler.go +++ b/pkg/scalers/azure_pipelines_scaler.go @@ -33,6 +33,7 @@ type azurePipelinesMetadata struct { personalAccessToken string poolID string targetPipelinesQueueLength int + scalerIndex int } var azurePipelinesLog = logf.Log.WithName("azure_pipelines_scaler") @@ -95,6 +96,8 @@ func parseAzurePipelinesMetadata(config *ScalerConfig) (*azurePipelinesMetadata, return nil, fmt.Errorf("no poolID given") } + meta.scalerIndex = config.ScalerIndex + return 
&meta, nil } @@ -166,7 +169,7 @@ func (s *azurePipelinesScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetPipelinesQueueLengthQty := resource.NewQuantity(int64(s.metadata.targetPipelinesQueueLength), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "azure-pipelines-queue", s.metadata.organizationName, s.metadata.poolID)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "azure-pipelines-queue", s.metadata.organizationName, s.metadata.poolID))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/azure_pipelines_scaler_test.go b/pkg/scalers/azure_pipelines_scaler_test.go index 8a5d1cbb4a0..50842d0d8c9 100644 --- a/pkg/scalers/azure_pipelines_scaler_test.go +++ b/pkg/scalers/azure_pipelines_scaler_test.go @@ -14,6 +14,7 @@ type parseAzurePipelinesMetadataTestData struct { type azurePipelinesMetricIdentifier struct { metadataTestData *parseAzurePipelinesMetadataTestData + scalerIndex int name string } @@ -38,7 +39,8 @@ var testAzurePipelinesMetadata = []parseAzurePipelinesMetadataTestData{ } var azurePipelinesMetricIdentifiers = []azurePipelinesMetricIdentifier{ - {&testAzurePipelinesMetadata[1], "azure-pipelines-queue-sample-1"}, + {&testAzurePipelinesMetadata[1], 0, "s0-azure-pipelines-queue-sample-1"}, + {&testAzurePipelinesMetadata[1], 1, "s1-azure-pipelines-queue-sample-1"}, } func TestParseAzurePipelinesMetadata(t *testing.T) { @@ -55,7 +57,7 @@ func TestParseAzurePipelinesMetadata(t *testing.T) { func TestAzurePipelinesGetMetricSpecForScaling(t *testing.T) { for _, testData := range azurePipelinesMetricIdentifiers { - meta, err := parseAzurePipelinesMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams}) + meta, err 
:= parseAzurePipelinesMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/azure_queue_scaler.go b/pkg/scalers/azure_queue_scaler.go index fc1da110ed4..f930eb3f840 100644 --- a/pkg/scalers/azure_queue_scaler.go +++ b/pkg/scalers/azure_queue_scaler.go @@ -53,6 +53,7 @@ type azureQueueMetadata struct { connection string accountName string endpointSuffix string + scalerIndex int } var azureQueueLog = logf.Log.WithName("azure_queue_scaler") @@ -132,6 +133,8 @@ func parseAzureQueueMetadata(config *ScalerConfig) (*azureQueueMetadata, kedav1a return nil, "", fmt.Errorf("pod identity %s not supported for azure storage queues", config.PodIdentity) } + meta.scalerIndex = config.ScalerIndex + return &meta, config.PodIdentity, nil } @@ -163,7 +166,7 @@ func (s *azureQueueScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetQueueLengthQty := resource.NewQuantity(int64(s.metadata.targetQueueLength), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s", "azure-queue", s.metadata.queueName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s", "azure-queue", s.metadata.queueName))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/azure_queue_scaler_test.go b/pkg/scalers/azure_queue_scaler_test.go index 6ba7189d79c..3eb5ed0ca86 100644 --- a/pkg/scalers/azure_queue_scaler_test.go +++ b/pkg/scalers/azure_queue_scaler_test.go @@ -37,6 +37,7 @@ type parseAzQueueMetadataTestData struct { type azQueueMetricIdentifier struct { metadataTestData *parseAzQueueMetadataTestData + scalerIndex int name string } @@ -76,8 +77,8 @@ var 
testAzQueueMetadata = []parseAzQueueMetadataTestData{ } var azQueueMetricIdentifiers = []azQueueMetricIdentifier{ - {&testAzQueueMetadata[1], "azure-queue-sample"}, - {&testAzQueueMetadata[4], "azure-queue-sample_queue"}, + {&testAzQueueMetadata[1], 0, "s0-azure-queue-sample"}, + {&testAzQueueMetadata[4], 1, "s1-azure-queue-sample_queue"}, } func TestAzQueueParseMetadata(t *testing.T) { @@ -97,7 +98,7 @@ func TestAzQueueParseMetadata(t *testing.T) { func TestAzQueueGetMetricSpecForScaling(t *testing.T) { for _, testData := range azQueueMetricIdentifiers { - meta, podIdentity, err := parseAzureQueueMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams, PodIdentity: testData.metadataTestData.podIdentity}) + meta, podIdentity, err := parseAzureQueueMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams, PodIdentity: testData.metadataTestData.podIdentity, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/azure_servicebus_scaler.go b/pkg/scalers/azure_servicebus_scaler.go index 402b0b36f57..bf66910bb9e 100755 --- a/pkg/scalers/azure_servicebus_scaler.go +++ b/pkg/scalers/azure_servicebus_scaler.go @@ -64,6 +64,7 @@ type azureServiceBusMetadata struct { entityType entityType namespace string endpointSuffix string + scalerIndex int } // NewAzureServiceBusScaler creates a new AzureServiceBusScaler @@ -155,6 +156,8 @@ func parseAzureServiceBusMetadata(config *ScalerConfig) (*azureServiceBusMetadat return nil, fmt.Errorf("azure service bus doesn't support pod identity %s", config.PodIdentity) } + meta.scalerIndex = config.ScalerIndex + return &meta, nil } @@ -191,7 +194,7 @@ func (s *azureServiceBusScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { } 
externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/azure_servicebus_scaler_test.go b/pkg/scalers/azure_servicebus_scaler_test.go index 136b085a465..b9d6b85e046 100755 --- a/pkg/scalers/azure_servicebus_scaler_test.go +++ b/pkg/scalers/azure_servicebus_scaler_test.go @@ -47,6 +47,7 @@ type parseServiceBusMetadataTestData struct { type azServiceBusMetricIdentifier struct { metadataTestData *parseServiceBusMetadataTestData + scalerIndex int name string } @@ -99,8 +100,8 @@ var parseServiceBusMetadataDataset = []parseServiceBusMetadataTestData{ } var azServiceBusMetricIdentifiers = []azServiceBusMetricIdentifier{ - {&parseServiceBusMetadataDataset[1], "azure-servicebus-namespacename-testqueue"}, - {&parseServiceBusMetadataDataset[3], "azure-servicebus-namespacename-testtopic-testsubscription"}, + {&parseServiceBusMetadataDataset[1], 0, "s0-azure-servicebus-namespacename-testqueue"}, + {&parseServiceBusMetadataDataset[3], 1, "s1-azure-servicebus-namespacename-testtopic-testsubscription"}, } var commonHTTPClient = &http.Client{ @@ -136,7 +137,7 @@ var getServiceBusLengthTestScalers = []azureServiceBusScaler{ func TestParseServiceBusMetadata(t *testing.T) { for _, testData := range parseServiceBusMetadataDataset { - meta, err := parseAzureServiceBusMetadata(&ScalerConfig{ResolvedEnv: sampleResolvedEnv, TriggerMetadata: testData.metadata, AuthParams: testData.authParams, PodIdentity: testData.podIdentity}) + meta, err := parseAzureServiceBusMetadata(&ScalerConfig{ResolvedEnv: sampleResolvedEnv, TriggerMetadata: testData.metadata, AuthParams: testData.authParams, PodIdentity: testData.podIdentity, ScalerIndex: testEventHubScaler.metadata.scalerIndex}) if err != nil && !testData.isError { t.Error("Expected success but got error", err) @@ -189,7 +190,7 @@ 
func TestGetServiceBusLength(t *testing.T) { func TestAzServiceBusGetMetricSpecForScaling(t *testing.T) { for _, testData := range azServiceBusMetricIdentifiers { - meta, err := parseAzureServiceBusMetadata(&ScalerConfig{ResolvedEnv: connectionResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.metadataTestData.authParams, PodIdentity: testData.metadataTestData.podIdentity}) + meta, err := parseAzureServiceBusMetadata(&ScalerConfig{ResolvedEnv: connectionResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.metadataTestData.authParams, PodIdentity: testData.metadataTestData.podIdentity, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/rabbitmq_scaler.go b/pkg/scalers/rabbitmq_scaler.go index 2f379cef8ac..0fcee476817 100644 --- a/pkg/scalers/rabbitmq_scaler.go +++ b/pkg/scalers/rabbitmq_scaler.go @@ -60,17 +60,18 @@ type rabbitMQScaler struct { } type rabbitMQMetadata struct { - queueName string - mode string // QueueLength or MessageRate - value int // trigger value (queue length or publish/sec. rate) - host string // connection string for either HTTP or AMQP protocol - protocol string // either http or amqp protocol - vhostName *string // override the vhost from the connection info - useRegex bool // specify if the queueName contains a rexeg - pageSize int // specify the page size if useRegex is enabled - operation string // specify the operation to apply in case of multiples queues - metricName string // custom metric name for trigger - timeout time.Duration // custom http timeout for a specific trigger + queueName string + mode string // QueueLength or MessageRate + value int // trigger value (queue length or publish/sec. 
rate) + host string // connection string for either HTTP or AMQP protocol + protocol string // either http or amqp protocol + vhostName *string // override the vhost from the connection info + useRegex bool // specify if the queueName contains a regex + pageSize int // specify the page size if useRegex is enabled + operation string // specify the operation to apply in case of multiple queues + metricName string // custom metric name for trigger + timeout time.Duration // custom http timeout for a specific trigger + scalerIndex int // scaler index } type queueInfo struct { @@ -255,6 +256,8 @@ func parseRabbitMQMetadata(config *ScalerConfig) (*rabbitMQMetadata, error) { meta.timeout = config.GlobalHTTPTimeout } + meta.scalerIndex = config.ScalerIndex + return &meta, nil } @@ -448,7 +451,7 @@ func (s *rabbitMQScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: s.metadata.metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/rabbitmq_scaler_test.go b/pkg/scalers/rabbitmq_scaler_test.go index dd43416bafa..2677851aab0 100644 --- a/pkg/scalers/rabbitmq_scaler_test.go +++ b/pkg/scalers/rabbitmq_scaler_test.go @@ -24,6 +24,7 @@ type parseRabbitMQMetadataTestData struct { type rabbitMQMetricIdentifier struct { metadataTestData *parseRabbitMQMetadataTestData + index int name string } @@ -113,9 +114,9 @@ var testRabbitMQMetadata = []parseRabbitMQMetadataTestData{ } var rabbitMQMetricIdentifiers = []rabbitMQMetricIdentifier{ - {&testRabbitMQMetadata[1], "rabbitmq-sample"}, - {&testRabbitMQMetadata[7], "rabbitmq-namespace-2Fname"}, - {&testRabbitMQMetadata[31], "rabbitmq-host1-sample"}, + {&testRabbitMQMetadata[1], 0, "s0-rabbitmq-sample"}, + {&testRabbitMQMetadata[7], 1, "s1-rabbitmq-namespace-2Fname"}, + {&testRabbitMQMetadata[31], 2,
"s2-rabbitmq-host1-sample"}, } func TestRabbitMQParseMetadata(t *testing.T) { @@ -448,7 +449,7 @@ func TestGetPageSizeWithRegex(t *testing.T) { func TestRabbitMQGetMetricSpecForScaling(t *testing.T) { for _, testData := range rabbitMQMetricIdentifiers { - meta, err := parseRabbitMQMetadata(&ScalerConfig{ResolvedEnv: sampleRabbitMqResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: nil}) + meta, err := parseRabbitMQMetadata(&ScalerConfig{ResolvedEnv: sampleRabbitMqResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: nil, ScalerIndex: testData.index}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/scaler.go b/pkg/scalers/scaler.go index 1a7cc9e171f..d5941be6f38 100644 --- a/pkg/scalers/scaler.go +++ b/pkg/scalers/scaler.go @@ -81,6 +81,9 @@ type ScalerConfig struct { // PodIdentity PodIdentity kedav1alpha1.PodIdentityProvider + + // ScalerIndex + ScalerIndex int } // GetFromAuthOrMeta helps getting a field from Auth or Meta sections @@ -97,3 +100,8 @@ func GetFromAuthOrMeta(config *ScalerConfig, field string) (string, error) { } return result, err } + +// GenerateMetricNameWithIndex helps adding the index prefix to the metric name +func GenerateMetricNameWithIndex(scalerIndex int, metricName string) string { + return fmt.Sprintf("s%d-%s", scalerIndex, metricName) +} diff --git a/pkg/scaling/scale_handler.go b/pkg/scaling/scale_handler.go index e2da33b7c0d..859ba16b15e 100644 --- a/pkg/scaling/scale_handler.go +++ b/pkg/scaling/scale_handler.go @@ -280,7 +280,7 @@ func (h *scaleHandler) buildScalers(withTriggers *kedav1alpha1.WithTriggers, pod } } - for i, trigger := range withTriggers.Spec.Triggers { + for scalerIndex, trigger := range withTriggers.Spec.Triggers { config := &scalers.ScalerConfig{ Name: withTriggers.Name, Namespace: withTriggers.Namespace, @@ -288,6 +288,7 @@ func (h *scaleHandler) buildScalers(withTriggers *kedav1alpha1.WithTriggers, pod ResolvedEnv: resolvedEnv, 
AuthParams: make(map[string]string), GlobalHTTPTimeout: h.globalHTTPTimeout, + ScalerIndex: scalerIndex, } config.AuthParams, config.PodIdentity, err = resolver.ResolveAuthRefAndPodIdentity(h.client, logger, trigger.AuthenticationRef, podTemplateSpec, withTriggers.Namespace) @@ -300,7 +301,7 @@ func (h *scaleHandler) buildScalers(withTriggers *kedav1alpha1.WithTriggers, pod if err != nil { closeScalers(scalersRes) h.recorder.Event(withTriggers, corev1.EventTypeWarning, eventreason.KEDAScalerFailed, err.Error()) - return []scalers.Scaler{}, fmt.Errorf("error getting scaler for trigger #%d: %s", i, err) + return []scalers.Scaler{}, fmt.Errorf("error getting scaler for trigger #%d: %s", scalerIndex, err) } scalersRes = append(scalersRes, scaler) From 141d195a5feef174847fa8e75c326f6c4a205c97 Mon Sep 17 00:00:00 2001 From: Jorge Turrado Date: Sun, 10 Oct 2021 17:27:20 +0200 Subject: [PATCH 02/11] Update other scalers to use scalerIndex Signed-off-by: Jorge Turrado --- pkg/scalers/cron_scaler.go | 5 +- pkg/scalers/cron_scaler_test.go | 7 ++- pkg/scalers/external_scaler.go | 5 +- pkg/scalers/gcp_pub_sub_scaler.go | 4 +- pkg/scalers/gcp_pubsub_scaler_test.go | 6 +- pkg/scalers/graphite_scaler.go | 6 +- pkg/scalers/graphite_scaler_test.go | 6 +- pkg/scalers/huawei_cloudeye_scaler.go | 9 ++- pkg/scalers/huawei_cloudeye_test.go | 6 +- pkg/scalers/ibmmq_scaler.go | 5 +- pkg/scalers/ibmmq_scaler_test.go | 6 +- pkg/scalers/influxdb_scaler.go | 4 +- pkg/scalers/influxdb_scaler_test.go | 7 ++- pkg/scalers/kafka_scaler.go | 6 +- pkg/scalers/kafka_scaler_test.go | 6 +- pkg/scalers/kubernetes_workload_scaler.go | 4 +- .../kubernetes_workload_scaler_test.go | 16 ++--- pkg/scalers/liiklus_scaler.go | 4 +- pkg/scalers/liiklus_scaler_test.go | 6 +- pkg/scalers/metrics_api_scaler.go | 6 +- pkg/scalers/metrics_api_scaler_test.go | 2 +- pkg/scalers/mongo_scaler.go | 7 ++- pkg/scalers/mongo_scaler_test.go | 6 +- pkg/scalers/mssql_scaler.go | 7 ++- pkg/scalers/mysql_scaler.go | 5 +- 
pkg/scalers/mysql_scaler_test.go | 7 ++- pkg/scalers/openstack_metrics_scaler.go | 7 ++- pkg/scalers/openstack_metrics_scaler_test.go | 56 ++++++++--------- pkg/scalers/openstack_swift_scaler.go | 7 ++- pkg/scalers/openstack_swift_scaler_test.go | 61 ++++++++++--------- pkg/scalers/postgresql_scaler.go | 5 +- pkg/scalers/postgresql_scaler_test.go | 15 ++--- pkg/scalers/prometheus_scaler.go | 7 ++- pkg/scalers/prometheus_scaler_test.go | 6 +- pkg/scalers/rabbitmq_scaler.go | 1 - pkg/scalers/redis_scaler.go | 6 +- pkg/scalers/redis_scaler_test.go | 6 +- pkg/scalers/redis_streams_scaler.go | 6 +- pkg/scalers/redis_streams_scaler_test.go | 6 +- pkg/scalers/selenium_grid_scaler.go | 3 +- pkg/scalers/solace_scaler.go | 8 ++- pkg/scalers/solace_scaler_test.go | 41 ++++++++++--- pkg/scalers/stan_scaler.go | 5 +- pkg/scalers/stan_scaler_test.go | 6 +- 44 files changed, 258 insertions(+), 152 deletions(-) diff --git a/pkg/scalers/cron_scaler.go b/pkg/scalers/cron_scaler.go index 4f6fac823bd..be749e0a362 100644 --- a/pkg/scalers/cron_scaler.go +++ b/pkg/scalers/cron_scaler.go @@ -32,6 +32,7 @@ type cronMetadata struct { end string timezone string desiredReplicas int64 + scalerIndex int } var cronLog = logf.Log.WithName("cron_scaler") @@ -105,7 +106,7 @@ func parseCronMetadata(config *ScalerConfig) (*cronMetadata, error) { } else { return nil, fmt.Errorf("no DesiredReplicas specified. 
%s", config.TriggerMetadata) } - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -156,7 +157,7 @@ func (s *cronScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(int64(specReplicas), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", "cron", s.metadata.timezone, parseCronTimeFormat(s.metadata.start), parseCronTimeFormat(s.metadata.end))), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", "cron", s.metadata.timezone, parseCronTimeFormat(s.metadata.start), parseCronTimeFormat(s.metadata.end)))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/cron_scaler_test.go b/pkg/scalers/cron_scaler_test.go index 5b6eccdcd75..30efad9cbf8 100644 --- a/pkg/scalers/cron_scaler_test.go +++ b/pkg/scalers/cron_scaler_test.go @@ -15,6 +15,7 @@ type parseCronMetadataTestData struct { type cronMetricIdentifier struct { metadataTestData *parseCronMetadataTestData + scalerIndex int name string } @@ -49,8 +50,8 @@ var testCronMetadata = []parseCronMetadataTestData{ } var cronMetricIdentifiers = []cronMetricIdentifier{ - {&testCronMetadata[1], "cron-Etc-UTC-00xxThu-5923xxThu"}, - {&testCronMetadata[2], "cron-Etc-UTC-0xSl2xxx-01-23Sl2xxx"}, + {&testCronMetadata[1], 0, "s0-cron-Etc-UTC-00xxThu-5923xxThu"}, + {&testCronMetadata[2], 1, "s1-cron-Etc-UTC-0xSl2xxx-01-23Sl2xxx"}, } var tz, _ = time.LoadLocation(validCronMetadata2["timezone"]) @@ -113,7 +114,7 @@ func TestGetMetricsRange(t *testing.T) { func TestCronGetMetricSpecForScaling(t *testing.T) { for _, testData := range cronMetricIdentifiers { - meta, err := parseCronMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata}) + meta, err := parseCronMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ScalerIndex: 
testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/external_scaler.go b/pkg/scalers/external_scaler.go index eb7c73a69d0..914a18906fb 100644 --- a/pkg/scalers/external_scaler.go +++ b/pkg/scalers/external_scaler.go @@ -33,6 +33,7 @@ type externalScalerMetadata struct { scalerAddress string tlsCertFile string originalMetadata map[string]string + scalerIndex int } type connectionGroup struct { @@ -111,7 +112,7 @@ func parseExternalScalerMetadata(config *ScalerConfig) (externalScalerMetadata, meta.originalMetadata[key] = value } } - + meta.scalerIndex = config.ScalerIndex return meta, nil } @@ -159,7 +160,7 @@ func (s *externalScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: spec.MetricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, spec.MetricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/gcp_pub_sub_scaler.go b/pkg/scalers/gcp_pub_sub_scaler.go index feac8efc4ee..2beb2baaccc 100644 --- a/pkg/scalers/gcp_pub_sub_scaler.go +++ b/pkg/scalers/gcp_pub_sub_scaler.go @@ -34,6 +34,7 @@ type pubsubMetadata struct { targetSubscriptionSize int subscriptionName string gcpAuthorization gcpAuthorizationMetadata + scalerIndex int } var gcpPubSubLog = logf.Log.WithName("gcp_pub_sub_scaler") @@ -78,6 +79,7 @@ func parsePubSubMetadata(config *ScalerConfig) (*pubsubMetadata, error) { return nil, err } meta.gcpAuthorization = *auth + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -112,7 +114,7 @@ func (s *pubsubScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s", "gcp", s.metadata.subscriptionName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s", 
"gcp", s.metadata.subscriptionName))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/gcp_pubsub_scaler_test.go b/pkg/scalers/gcp_pubsub_scaler_test.go index 6a7c6b38f20..655e0c869cb 100644 --- a/pkg/scalers/gcp_pubsub_scaler_test.go +++ b/pkg/scalers/gcp_pubsub_scaler_test.go @@ -16,6 +16,7 @@ type parsePubSubMetadataTestData struct { type gcpPubSubMetricIdentifier struct { metadataTestData *parsePubSubMetadataTestData + scalerIndex int name string } @@ -36,7 +37,8 @@ var testPubSubMetadata = []parsePubSubMetadataTestData{ } var gcpPubSubMetricIdentifiers = []gcpPubSubMetricIdentifier{ - {&testPubSubMetadata[1], "gcp-mysubscription"}, + {&testPubSubMetadata[1], 0, "s0-gcp-mysubscription"}, + {&testPubSubMetadata[1], 1, "s1-gcp-mysubscription"}, } func TestPubSubParseMetadata(t *testing.T) { @@ -53,7 +55,7 @@ func TestPubSubParseMetadata(t *testing.T) { func TestGcpPubSubGetMetricSpecForScaling(t *testing.T) { for _, testData := range gcpPubSubMetricIdentifiers { - meta, err := parsePubSubMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testPubSubResolvedEnv}) + meta, err := parsePubSubMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testPubSubResolvedEnv, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/graphite_scaler.go b/pkg/scalers/graphite_scaler.go index 8976aedfc53..beb82d35fa5 100644 --- a/pkg/scalers/graphite_scaler.go +++ b/pkg/scalers/graphite_scaler.go @@ -43,6 +43,7 @@ type graphiteMetadata struct { enableBasicAuth bool username string password string // +optional + scalerIndex int } type grapQueryResult []struct { @@ -104,6 +105,8 @@ func parseGraphiteMetadata(config *ScalerConfig) (*graphiteMetadata, error) { meta.threshold = t } + meta.scalerIndex = config.ScalerIndex + val, ok := config.TriggerMetadata["authMode"] // no authMode specified if !ok { @@ 
-142,9 +145,10 @@ func (s *graphiteScaler) Close() error { func (s *graphiteScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(int64(s.metadata.threshold), resource.DecimalSI) + metricName := kedautil.NormalizeString(fmt.Sprintf("%s-%s", "graphite", s.metadata.metricName)) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s", "graphite", s.metadata.metricName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/graphite_scaler_test.go b/pkg/scalers/graphite_scaler_test.go index dd0eb374e50..8f903d3edd1 100644 --- a/pkg/scalers/graphite_scaler_test.go +++ b/pkg/scalers/graphite_scaler_test.go @@ -12,6 +12,7 @@ type parseGraphiteMetadataTestData struct { type graphiteMetricIdentifier struct { metadataTestData *parseGraphiteMetadataTestData + scalerIndex int name string } @@ -32,7 +33,8 @@ var testGrapMetadata = []parseGraphiteMetadataTestData{ } var graphiteMetricIdentifiers = []graphiteMetricIdentifier{ - {&testGrapMetadata[1], "graphite-request-count"}, + {&testGrapMetadata[1], 0, "s0-graphite-request-count"}, + {&testGrapMetadata[1], 1, "s1-graphite-request-count"}, } type graphiteAuthMetadataTestData struct { @@ -64,7 +66,7 @@ func TestGraphiteParseMetadata(t *testing.T) { func TestGraphiteGetMetricSpecForScaling(t *testing.T) { for _, testData := range graphiteMetricIdentifiers { - meta, err := parseGraphiteMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata}) + meta, err := parseGraphiteMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/huawei_cloudeye_scaler.go b/pkg/scalers/huawei_cloudeye_scaler.go index d0b2ea9327c..a91af257d6e 100644 --- 
a/pkg/scalers/huawei_cloudeye_scaler.go +++ b/pkg/scalers/huawei_cloudeye_scaler.go @@ -46,6 +46,8 @@ type huaweiCloudeyeMetadata struct { metricPeriod string huaweiAuthorization huaweiAuthorizationMetadata + + scalerIndex int } type huaweiAuthorizationMetadata struct { @@ -164,7 +166,7 @@ func parseHuaweiCloudeyeMetadata(config *ScalerConfig) (*huaweiCloudeyeMetadata, } meta.huaweiAuthorization = auth - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -243,9 +245,10 @@ func (h *huaweiCloudeyeScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(int64(h.metadata.targetMetricValue), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s-%s", "huawei-cloudeye", h.metadata.namespace, + Name: GenerateMetricNameWithIndex(h.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s-%s", "huawei-cloudeye", + h.metadata.namespace, h.metadata.metricsName, - h.metadata.dimensionName, h.metadata.dimensionValue)), + h.metadata.dimensionName, h.metadata.dimensionValue))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/huawei_cloudeye_test.go b/pkg/scalers/huawei_cloudeye_test.go index 28b8a0bdd90..89b12ee0443 100644 --- a/pkg/scalers/huawei_cloudeye_test.go +++ b/pkg/scalers/huawei_cloudeye_test.go @@ -24,6 +24,7 @@ type parseHuaweiCloudeyeMetadataTestData struct { type huaweiCloudeyeMetricIdentifier struct { metadataTestData *parseHuaweiCloudeyeMetadataTestData + scalerIndex int name string } @@ -140,7 +141,8 @@ var testHuaweiCloudeyeMetadata = []parseHuaweiCloudeyeMetadataTestData{ } var huaweiCloudeyeMetricIdentifiers = []huaweiCloudeyeMetricIdentifier{ - {&testHuaweiCloudeyeMetadata[0], "huawei-cloudeye-SYS-ELB-mb_l7_qps-lbaas_instance_id-5e052238-0346-xxb0-86ea-92d9f33e29d2"}, + {&testHuaweiCloudeyeMetadata[0], 0, 
"s0-huawei-cloudeye-SYS-ELB-mb_l7_qps-lbaas_instance_id-5e052238-0346-xxb0-86ea-92d9f33e29d2"}, + {&testHuaweiCloudeyeMetadata[0], 1, "s1-huawei-cloudeye-SYS-ELB-mb_l7_qps-lbaas_instance_id-5e052238-0346-xxb0-86ea-92d9f33e29d2"}, } func TestHuaweiCloudeyeParseMetadata(t *testing.T) { @@ -157,7 +159,7 @@ func TestHuaweiCloudeyeParseMetadata(t *testing.T) { func TestHuaweiCloudeyeGetMetricSpecForScaling(t *testing.T) { for _, testData := range huaweiCloudeyeMetricIdentifiers { - meta, err := parseHuaweiCloudeyeMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.metadataTestData.authParams}) + meta, err := parseHuaweiCloudeyeMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.metadataTestData.authParams, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/ibmmq_scaler.go b/pkg/scalers/ibmmq_scaler.go index e199c19225f..57c445bbaa7 100644 --- a/pkg/scalers/ibmmq_scaler.go +++ b/pkg/scalers/ibmmq_scaler.go @@ -43,6 +43,7 @@ type IBMMQMetadata struct { password string targetQueueDepth int tlsDisabled bool + scalerIndex int } // CommandResponse Full structured response from MQ admin REST query @@ -143,7 +144,7 @@ func parseIBMMQMetadata(config *ScalerConfig) (*IBMMQMetadata, error) { default: return nil, fmt.Errorf("no password given") } - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -204,7 +205,7 @@ func (s *IBMMQScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetQueueLengthQty := resource.NewQuantity(int64(s.metadata.targetQueueDepth), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "IBMMQ", s.metadata.queueManager, s.metadata.queueName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "IBMMQ", 
s.metadata.queueManager, s.metadata.queueName))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/ibmmq_scaler_test.go b/pkg/scalers/ibmmq_scaler_test.go index d802d02e2c0..13ec97c4c47 100644 --- a/pkg/scalers/ibmmq_scaler_test.go +++ b/pkg/scalers/ibmmq_scaler_test.go @@ -27,12 +27,14 @@ var sampleIBMMQResolvedEnv = map[string]string{ // Test metric identifier with test MQ data and it's name type IBMMQMetricIdentifier struct { metadataTestData *parseIBMMQMetadataTestData + scalerIndex int name string } // Setting metric identifier mock name var IBMMQMetricIdentifiers = []IBMMQMetricIdentifier{ - {&testIBMMQMetadata[1], "IBMMQ-testQueueManager-testQueue"}, + {&testIBMMQMetadata[1], 0, "s0-IBMMQ-testQueueManager-testQueue"}, + {&testIBMMQMetadata[1], 1, "s1-IBMMQ-testQueueManager-testQueue"}, } // Test cases for TestIBMMQParseMetadata test @@ -103,7 +105,7 @@ func TestParseDefaultQueueDepth(t *testing.T) { // Create a scaler and check if metrics method is available func TestIBMMQGetMetricSpecForScaling(t *testing.T) { for _, testData := range IBMMQMetricIdentifiers { - metadata, err := parseIBMMQMetadata(&ScalerConfig{ResolvedEnv: sampleIBMMQResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.metadataTestData.authParams}) + metadata, err := parseIBMMQMetadata(&ScalerConfig{ResolvedEnv: sampleIBMMQResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.metadataTestData.authParams, ScalerIndex: testData.scalerIndex}) httpTimeout := 100 * time.Millisecond if err != nil { diff --git a/pkg/scalers/influxdb_scaler.go b/pkg/scalers/influxdb_scaler.go index ff78c3215e6..1661e011d2e 100644 --- a/pkg/scalers/influxdb_scaler.go +++ b/pkg/scalers/influxdb_scaler.go @@ -31,6 +31,7 @@ type influxDBMetadata struct { serverURL string unsafeSsL bool thresholdValue float64 + scalerIndex int } var influxDBLog = logf.Log.WithName("influxdb_scaler") @@ -153,6 +154,7 @@ func 
parseInfluxDBMetadata(config *ScalerConfig) (*influxDBMetadata, error) { serverURL: serverURL, thresholdValue: thresholdValue, unsafeSsL: unsafeSsL, + scalerIndex: config.ScalerIndex, }, nil } @@ -222,7 +224,7 @@ func (s *influxDBScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(int64(s.metadata.thresholdValue), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: s.metadata.metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/influxdb_scaler_test.go b/pkg/scalers/influxdb_scaler_test.go index db83262183a..d528b7e19ea 100644 --- a/pkg/scalers/influxdb_scaler_test.go +++ b/pkg/scalers/influxdb_scaler_test.go @@ -19,6 +19,7 @@ type parseInfluxDBMetadataTestData struct { type influxDBMetricIdentifier struct { metadataTestData *parseInfluxDBMetadataTestData + scalerIndex int name string } @@ -46,8 +47,8 @@ var testInfluxDBMetadata = []parseInfluxDBMetadataTestData{ } var influxDBMetricIdentifiers = []influxDBMetricIdentifier{ - {&testInfluxDBMetadata[1], "influxdb-influx_metric"}, - {&testInfluxDBMetadata[2], "influxdb-https---xxx-influx_org"}, + {&testInfluxDBMetadata[1], 0, "s0-influxdb-influx_metric"}, + {&testInfluxDBMetadata[2], 1, "s1-influxdb-https---xxx-influx_org"}, } func TestInfluxDBParseMetadata(t *testing.T) { @@ -66,7 +67,7 @@ func TestInfluxDBParseMetadata(t *testing.T) { func TestInfluxDBGetMetricSpecForScaling(t *testing.T) { for _, testData := range influxDBMetricIdentifiers { - meta, err := parseInfluxDBMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testInfluxDBResolvedEnv}) + meta, err := parseInfluxDBMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testInfluxDBResolvedEnv, ScalerIndex: testData.scalerIndex}) if err != nil { 
t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/kafka_scaler.go b/pkg/scalers/kafka_scaler.go index 4869dd313a1..b5186e95c24 100644 --- a/pkg/scalers/kafka_scaler.go +++ b/pkg/scalers/kafka_scaler.go @@ -43,6 +43,8 @@ type kafkaMetadata struct { cert string key string ca string + + scalerIndex int } type offsetResetPolicy string @@ -201,7 +203,7 @@ func parseKafkaMetadata(config *ScalerConfig) (kafkaMetadata, error) { } meta.version = version } - + meta.scalerIndex = config.ScalerIndex return meta, nil } @@ -351,7 +353,7 @@ func (s *kafkaScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(s.metadata.lagThreshold, resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "kafka", s.metadata.topic, s.metadata.group)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "kafka", s.metadata.topic, s.metadata.group))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/kafka_scaler_test.go b/pkg/scalers/kafka_scaler_test.go index 54cb7dc167a..8c19bf26500 100644 --- a/pkg/scalers/kafka_scaler_test.go +++ b/pkg/scalers/kafka_scaler_test.go @@ -24,6 +24,7 @@ type parseKafkaAuthParamsTestData struct { type kafkaMetricIdentifier struct { metadataTestData *parseKafkaMetadataTestData + scalerIndex int name string } @@ -114,7 +115,8 @@ var parseKafkaAuthParamsTestDataset = []parseKafkaAuthParamsTestData{ } var kafkaMetricIdentifiers = []kafkaMetricIdentifier{ - {&parseKafkaMetadataTestDataset[4], "kafka-my-topic-my-group"}, + {&parseKafkaMetadataTestDataset[4], 0, "s0-kafka-my-topic-my-group"}, + {&parseKafkaMetadataTestDataset[4], 1, "s1-kafka-my-topic-my-group"}, } func TestGetBrokers(t *testing.T) { @@ -189,7 +191,7 @@ func TestKafkaAuthParams(t *testing.T) { } func TestKafkaGetMetricSpecForScaling(t 
*testing.T) { for _, testData := range kafkaMetricIdentifiers { - meta, err := parseKafkaMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, AuthParams: validWithAuthParams}) + meta, err := parseKafkaMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, AuthParams: validWithAuthParams, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/kubernetes_workload_scaler.go b/pkg/scalers/kubernetes_workload_scaler.go index d4b6a8c634c..513177e8bcd 100644 --- a/pkg/scalers/kubernetes_workload_scaler.go +++ b/pkg/scalers/kubernetes_workload_scaler.go @@ -32,6 +32,7 @@ type kubernetesWorkloadMetadata struct { podSelector labels.Selector namespace string value int64 + scalerIndex int } // NewKubernetesWorkloadScaler creates a new kubernetesWorkloadScaler @@ -59,6 +60,7 @@ func parseWorkloadMetadata(config *ScalerConfig) (*kubernetesWorkloadMetadata, e if err != nil || meta.value == 0 { return nil, fmt.Errorf("value must be an integer greater than 0") } + meta.scalerIndex = config.ScalerIndex return meta, nil } @@ -83,7 +85,7 @@ func (s *kubernetesWorkloadScaler) GetMetricSpecForScaling() []v2beta2.MetricSpe targetMetricValue := resource.NewQuantity(s.metadata.value, resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "workload", s.metadata.namespace, normalizeSelectorString(s.metadata.podSelector))), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "workload", s.metadata.namespace, normalizeSelectorString(s.metadata.podSelector)))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/kubernetes_workload_scaler_test.go b/pkg/scalers/kubernetes_workload_scaler_test.go index e8c5ed750f8..b7c20194b1d 100644 --- a/pkg/scalers/kubernetes_workload_scaler_test.go 
+++ b/pkg/scalers/kubernetes_workload_scaler_test.go @@ -84,20 +84,21 @@ func TestWorkloadIsActive(t *testing.T) { } type workloadGetMetricSpecForScalingTestData struct { - metadata map[string]string - namespace string - name string + metadata map[string]string + namespace string + scalerIndex int + name string } var getMetricSpecForScalingTestDataset = []workloadGetMetricSpecForScalingTestData{ // "podSelector": "app=demo", "namespace": "test" - {parseWorkloadMetadataTestDataset[0].metadata, parseWorkloadMetadataTestDataset[0].namespace, "workload-test-app=demo"}, + {parseWorkloadMetadataTestDataset[0].metadata, parseWorkloadMetadataTestDataset[0].namespace, 0, "s0-workload-test-app=demo"}, // "podSelector": "app=demo", "namespace": "default" - {parseWorkloadMetadataTestDataset[1].metadata, parseWorkloadMetadataTestDataset[1].namespace, "workload-default-app=demo"}, + {parseWorkloadMetadataTestDataset[1].metadata, parseWorkloadMetadataTestDataset[1].namespace, 1, "s1-workload-default-app=demo"}, // "podSelector": "app in (demo1, demo2)", "namespace": "test" - {parseWorkloadMetadataTestDataset[2].metadata, parseWorkloadMetadataTestDataset[2].namespace, "workload-test-appin-demo1-demo2-"}, + {parseWorkloadMetadataTestDataset[2].metadata, parseWorkloadMetadataTestDataset[2].namespace, 2, "s2-workload-test-appin-demo1-demo2-"}, // "podSelector": "app in (demo1, demo2),deploy in (deploy1, deploy2)", "namespace": "test" - {parseWorkloadMetadataTestDataset[3].metadata, parseWorkloadMetadataTestDataset[3].namespace, "workload-test-appin-demo1-demo2--deployin-deploy1-deploy2-"}, + {parseWorkloadMetadataTestDataset[3].metadata, parseWorkloadMetadataTestDataset[3].namespace, 3, "s3-workload-test-appin-demo1-demo2--deployin-deploy1-deploy2-"}, } func TestWorkloadGetMetricSpecForScaling(t *testing.T) { @@ -109,6 +110,7 @@ func TestWorkloadGetMetricSpecForScaling(t *testing.T) { AuthParams: map[string]string{}, GlobalHTTPTimeout: 1000 * time.Millisecond, Namespace: 
testData.namespace, + ScalerIndex: testData.scalerIndex, }, ) metric := s.GetMetricSpecForScaling() diff --git a/pkg/scalers/liiklus_scaler.go b/pkg/scalers/liiklus_scaler.go index cf82440f9ba..0743102daa7 100644 --- a/pkg/scalers/liiklus_scaler.go +++ b/pkg/scalers/liiklus_scaler.go @@ -30,6 +30,7 @@ type liiklusMetadata struct { topic string group string groupVersion uint32 + scalerIndex int } const ( @@ -85,7 +86,7 @@ func (s *liiklusScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(s.metadata.lagThreshold, resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "liiklus", s.metadata.topic, s.metadata.group)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "liiklus", s.metadata.topic, s.metadata.group))), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, @@ -182,5 +183,6 @@ func parseLiiklusMetadata(config *ScalerConfig) (*liiklusMetadata, error) { group: config.TriggerMetadata["group"], groupVersion: groupVersion, lagThreshold: lagThreshold, + scalerIndex: config.ScalerIndex, }, nil } diff --git a/pkg/scalers/liiklus_scaler_test.go b/pkg/scalers/liiklus_scaler_test.go index a8f8580d4f6..95454485807 100644 --- a/pkg/scalers/liiklus_scaler_test.go +++ b/pkg/scalers/liiklus_scaler_test.go @@ -21,6 +21,7 @@ type parseLiiklusMetadataTestData struct { type liiklusMetricIdentifier struct { metadataTestData *parseLiiklusMetadataTestData + scalerIndex int name string } @@ -33,7 +34,8 @@ var parseLiiklusMetadataTestDataset = []parseLiiklusMetadataTestData{ } var liiklusMetricIdentifiers = []liiklusMetricIdentifier{ - {&parseLiiklusMetadataTestDataset[4], "liiklus-foo-mygroup"}, + {&parseLiiklusMetadataTestDataset[4], 0, "s0-liiklus-foo-mygroup"}, + {&parseLiiklusMetadataTestDataset[4], 1, "s1-liiklus-foo-mygroup"}, } func 
TestLiiklusParseMetadata(t *testing.T) { @@ -165,7 +167,7 @@ func TestLiiklusScalerGetMetricsBehavior(t *testing.T) { func TestLiiklusGetMetricSpecForScaling(t *testing.T) { for _, testData := range liiklusMetricIdentifiers { - meta, err := parseLiiklusMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata}) + meta, err := parseLiiklusMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/metrics_api_scaler.go b/pkg/scalers/metrics_api_scaler.go index c23f1896118..a04231f2d8a 100644 --- a/pkg/scalers/metrics_api_scaler.go +++ b/pkg/scalers/metrics_api_scaler.go @@ -55,6 +55,8 @@ type metricsAPIScalerMetadata struct { // bearer enableBearerAuth bool bearerToken string + + scalerIndex int } const ( @@ -177,7 +179,7 @@ func parseMetricsAPIMetadata(config *ScalerConfig) (*metricsAPIScalerMetadata, e if len(config.AuthParams["ca"]) > 0 { meta.ca = config.AuthParams["ca"] } - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -248,7 +250,7 @@ func (s *metricsAPIScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { metricName := kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "http", s.metadata.url, s.metadata.valueLocation)) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/metrics_api_scaler_test.go b/pkg/scalers/metrics_api_scaler_test.go index 92ba8010310..31502e5a6fa 100644 --- a/pkg/scalers/metrics_api_scaler_test.go +++ b/pkg/scalers/metrics_api_scaler_test.go @@ -168,7 +168,7 @@ func TestBearerAuth(t *testing.T) { ResolvedEnv: map[string]string{}, TriggerMetadata: metadata, AuthParams: authentication, - GlobalHTTPTimeout: 1000 * time.Millisecond, + GlobalHTTPTimeout: 3000 
* time.Millisecond, }, ) if err != nil { diff --git a/pkg/scalers/mongo_scaler.go b/pkg/scalers/mongo_scaler.go index 74fae161d1d..3066515c56b 100644 --- a/pkg/scalers/mongo_scaler.go +++ b/pkg/scalers/mongo_scaler.go @@ -61,6 +61,10 @@ type mongoDBMetadata struct { // The name of the metric to use in the Horizontal Pod Autoscaler. This value will be prefixed with "mongodb-". // +optional metricName string + + // The index of the scaler inside the ScaledObject + // +internal + scalerIndex int } // Default variables and settings @@ -181,6 +185,7 @@ func parseMongoDBMetadata(config *ScalerConfig) (*mongoDBMetadata, string, error } meta.metricName = kedautil.NormalizeString(fmt.Sprintf("mongodb-%s-%s", maskedURL, meta.collection)) } + meta.scalerIndex = config.ScalerIndex return &meta, connStr, nil } @@ -248,7 +253,7 @@ func (s *mongoDBScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: s.metadata.metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/mongo_scaler_test.go b/pkg/scalers/mongo_scaler_test.go index b0c449aac08..10a481eded1 100644 --- a/pkg/scalers/mongo_scaler_test.go +++ b/pkg/scalers/mongo_scaler_test.go @@ -20,6 +20,7 @@ type parseMongoDBMetadataTestData struct { type mongoDBMetricIdentifier struct { metadataTestData *parseMongoDBMetadataTestData + scalerIndex int name string } @@ -55,7 +56,8 @@ var testMONGODBMetadata = []parseMongoDBMetadataTestData{ } var mongoDBMetricIdentifiers = []mongoDBMetricIdentifier{ - {metadataTestData: &testMONGODBMetadata[2], name: "mongodb-hpa"}, + {metadataTestData: &testMONGODBMetadata[2], scalerIndex: 0, name: "s0-mongodb-hpa"}, + {metadataTestData: &testMONGODBMetadata[2], scalerIndex: 1, name: "s1-mongodb-hpa"}, } func TestParseMongoDBMetadata(t *testing.T) { @@ -72,7 +74,7 @@ func 
TestParseMongoDBMetadata(t *testing.T) { func TestMongoDBGetMetricSpecForScaling(t *testing.T) { for _, testData := range mongoDBMetricIdentifiers { - meta, _, err := parseMongoDBMetadata(&ScalerConfig{ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams, TriggerMetadata: testData.metadataTestData.metadata}) + meta, _, err := parseMongoDBMetadata(&ScalerConfig{ResolvedEnv: testData.metadataTestData.resolvedEnv, AuthParams: testData.metadataTestData.authParams, TriggerMetadata: testData.metadataTestData.metadata, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/mssql_scaler.go b/pkg/scalers/mssql_scaler.go index 20c6b66ac30..7167d7f1327 100644 --- a/pkg/scalers/mssql_scaler.go +++ b/pkg/scalers/mssql_scaler.go @@ -56,6 +56,9 @@ type mssqlMetadata struct { // The name of the metric to use in the Horizontal Pod Autoscaler. This value will be prefixed with "mssql-". // +optional metricName string + // The index of the scaler inside the ScaledObject + // +internal + scalerIndex int } var mssqlLog = logf.Log.WithName("mssql_scaler") @@ -155,7 +158,7 @@ func parseMSSQLMetadata(config *ScalerConfig) (*mssqlMetadata, error) { meta.metricName = "mssql" } } - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -216,7 +219,7 @@ func (s *mssqlScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetQueryValue := resource.NewQuantity(int64(s.metadata.targetValue), resource.DecimalSI) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: s.metadata.metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/mysql_scaler.go b/pkg/scalers/mysql_scaler.go index a7445925b48..e28a0d81a9b 100644 --- a/pkg/scalers/mysql_scaler.go +++ b/pkg/scalers/mysql_scaler.go @@ -31,6 +31,7 @@ type 
mySQLMetadata struct { dbName string query string queryValue int + scalerIndex int } var mySQLLog = logf.Log.WithName("mysql_scaler") @@ -109,7 +110,7 @@ func parseMySQLMetadata(config *ScalerConfig) (*mySQLMetadata, error) { return nil, fmt.Errorf("no password given") } } - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -190,7 +191,7 @@ func (s *mySQLScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { } externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/mysql_scaler_test.go b/pkg/scalers/mysql_scaler_test.go index 09f2d090c38..2eb4a08fed4 100644 --- a/pkg/scalers/mysql_scaler_test.go +++ b/pkg/scalers/mysql_scaler_test.go @@ -18,6 +18,7 @@ type parseMySQLMetadataTestData struct { type mySQLMetricIdentifier struct { metadataTestData *parseMySQLMetadataTestData + scalerIndex int name string } @@ -53,8 +54,8 @@ var testMySQLMetadata = []parseMySQLMetadataTestData{ } var mySQLMetricIdentifiers = []mySQLMetricIdentifier{ - {metadataTestData: &testMySQLMetadata[1], name: "mysql-test_conn_str"}, - {metadataTestData: &testMySQLMetadata[2], name: "mysql-test_dbname"}, + {metadataTestData: &testMySQLMetadata[1], scalerIndex: 0, name: "s0-mysql-test_conn_str"}, + {metadataTestData: &testMySQLMetadata[2], scalerIndex: 1, name: "s1-mysql-test_dbname"}, } func TestParseMySQLMetadata(t *testing.T) { @@ -92,7 +93,7 @@ func TestMetadataToConnectionStrBuildNew(t *testing.T) { func TestMySQLGetMetricSpecForScaling(t *testing.T) { for _, testData := range mySQLMetricIdentifiers { - meta, err := parseMySQLMetadata(&ScalerConfig{ResolvedEnv: testData.metadataTestData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: nil}) + meta, err := parseMySQLMetadata(&ScalerConfig{ResolvedEnv: testData.metadataTestData.resolvedEnv, 
TriggerMetadata: testData.metadataTestData.metadata, AuthParams: nil, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/openstack_metrics_scaler.go b/pkg/scalers/openstack_metrics_scaler.go index 68f6a3ab287..27f1a66e34a 100644 --- a/pkg/scalers/openstack_metrics_scaler.go +++ b/pkg/scalers/openstack_metrics_scaler.go @@ -35,6 +35,7 @@ type openstackMetricMetadata struct { granularity int threshold float64 timeout int + scalerIndex int } type openstackMetricAuthenticationMetadata struct { @@ -163,7 +164,7 @@ func parseOpenstackMetricMetadata(config *ScalerConfig) (*openstackMetricMetadat } else { meta.timeout = metricDefaultHTTPClientTimeout } - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -196,9 +197,11 @@ func parseOpenstackMetricAuthenticationMetadata(config *ScalerConfig) (openstack func (a *openstackMetricScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricVal := resource.NewQuantity(int64(a.metadata.threshold), resource.DecimalSI) + metricName := kedautil.NormalizeString(fmt.Sprintf("openstack-metric-%s-%s-%s", a.metadata.metricID, strconv.FormatFloat(a.metadata.threshold, 'f', 0, 32), a.metadata.aggregationMethod)) + externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("openstack-metric-%s-%s-%s", a.metadata.metricID, strconv.FormatFloat(a.metadata.threshold, 'f', 0, 32), a.metadata.aggregationMethod)), + Name: GenerateMetricNameWithIndex(a.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/openstack_metrics_scaler_test.go b/pkg/scalers/openstack_metrics_scaler_test.go index 36eefba98f1..59ce658cd97 100644 --- a/pkg/scalers/openstack_metrics_scaler_test.go +++ b/pkg/scalers/openstack_metrics_scaler_test.go @@ -19,6 +19,7 @@ type openstackMetricScalerMetricIdentifier struct { resolvedEnv 
map[string]string metadataTestData *parseOpenstackMetricMetadataTestData authMetadataTestData *parseOpenstackMetricAuthMetadataTestData + scalerIndex int name string } @@ -88,26 +89,25 @@ var invalidOpenstackMetricAuthMetadataTestData = []parseOpenstackMetricAuthMetad func TestOpenstackMetricsGetMetricsForSpecScaling(t *testing.T) { // first, test cases with authentication based on password testCases := []openstackMetricScalerMetricIdentifier{ - {nil, &opentsackMetricMetadataTestData[0], &openstackMetricAuthMetadataTestData[0], "openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-mean"}, - {nil, &opentsackMetricMetadataTestData[1], &openstackMetricAuthMetadataTestData[0], "openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-sum"}, - {nil, &opentsackMetricMetadataTestData[2], &openstackMetricAuthMetadataTestData[0], "openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-max"}, - {nil, &opentsackMetricMetadataTestData[3], &openstackMetricAuthMetadataTestData[0], "openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-mean"}, - - {nil, &opentsackMetricMetadataTestData[0], &openstackMetricAuthMetadataTestData[1], "openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-mean"}, - {nil, &opentsackMetricMetadataTestData[1], &openstackMetricAuthMetadataTestData[1], "openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-sum"}, - {nil, &opentsackMetricMetadataTestData[2], &openstackMetricAuthMetadataTestData[1], "openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-max"}, - {nil, &opentsackMetricMetadataTestData[3], &openstackMetricAuthMetadataTestData[1], "openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-mean"}, + {nil, &opentsackMetricMetadataTestData[0], &openstackMetricAuthMetadataTestData[0], 0, "s0-openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-mean"}, + {nil, &opentsackMetricMetadataTestData[1], &openstackMetricAuthMetadataTestData[0], 1, "s1-openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-sum"}, + 
{nil, &opentsackMetricMetadataTestData[2], &openstackMetricAuthMetadataTestData[0], 2, "s2-openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-max"}, + {nil, &opentsackMetricMetadataTestData[3], &openstackMetricAuthMetadataTestData[0], 3, "s3-openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-mean"}, + {nil, &opentsackMetricMetadataTestData[0], &openstackMetricAuthMetadataTestData[1], 4, "s4-openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-mean"}, + {nil, &opentsackMetricMetadataTestData[1], &openstackMetricAuthMetadataTestData[1], 5, "s5-openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-sum"}, + {nil, &opentsackMetricMetadataTestData[2], &openstackMetricAuthMetadataTestData[1], 6, "s6-openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-max"}, + {nil, &opentsackMetricMetadataTestData[3], &openstackMetricAuthMetadataTestData[1], 7, "s7-openstack-metric-003bb589-166d-439d-8c31-cbf098d863de-1250-mean"}, } for _, testData := range testCases { testData := testData - meta, err := parseOpenstackMetricMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata}) + meta, err := parseOpenstackMetricMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata from openstack metrics scaler") } - _, err = parseOpenstackMetricAuthenticationMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata}) + _, err = parseOpenstackMetricAuthenticationMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata, ScalerIndex: 
testData.scalerIndex}) if err != nil { t.Fatal("could not parse openstack metric authentication metadata") @@ -125,21 +125,21 @@ func TestOpenstackMetricsGetMetricsForSpecScaling(t *testing.T) { func TestOpenstackMetricsGetMetricsForSpecScalingInvalidMetaData(t *testing.T) { testCases := []openstackMetricScalerMetricIdentifier{ - {nil, &invalidOpenstackMetricMetadaTestData[0], &openstackMetricAuthMetadataTestData[0], "Missing metrics url"}, - {nil, &invalidOpenstackMetricMetadaTestData[1], &openstackMetricAuthMetadataTestData[0], "Empty metrics url"}, - {nil, &invalidOpenstackMetricMetadaTestData[2], &openstackMetricAuthMetadataTestData[0], "Missing metricID"}, - {nil, &invalidOpenstackMetricMetadaTestData[3], &openstackMetricAuthMetadataTestData[0], "Empty metricID"}, - {nil, &invalidOpenstackMetricMetadaTestData[4], &openstackMetricAuthMetadataTestData[0], "Missing aggregation method"}, - {nil, &invalidOpenstackMetricMetadaTestData[5], &openstackMetricAuthMetadataTestData[0], "Missing granularity"}, - {nil, &invalidOpenstackMetricMetadaTestData[6], &openstackMetricAuthMetadataTestData[0], "Missing threshold"}, - {nil, &invalidOpenstackMetricMetadaTestData[7], &openstackMetricAuthMetadataTestData[0], "Missing threshold"}, - {nil, &invalidOpenstackMetricMetadaTestData[8], &openstackMetricAuthMetadataTestData[0], "Missing threshold"}, + {nil, &invalidOpenstackMetricMetadaTestData[0], &openstackMetricAuthMetadataTestData[0], 0, "s0-Missing metrics url"}, + {nil, &invalidOpenstackMetricMetadaTestData[1], &openstackMetricAuthMetadataTestData[0], 1, "s1-Empty metrics url"}, + {nil, &invalidOpenstackMetricMetadaTestData[2], &openstackMetricAuthMetadataTestData[0], 2, "s2-Missing metricID"}, + {nil, &invalidOpenstackMetricMetadaTestData[3], &openstackMetricAuthMetadataTestData[0], 3, "s3-Empty metricID"}, + {nil, &invalidOpenstackMetricMetadaTestData[4], &openstackMetricAuthMetadataTestData[0], 4, "s4-Missing aggregation method"}, + {nil, 
&invalidOpenstackMetricMetadaTestData[5], &openstackMetricAuthMetadataTestData[0], 5, "s5-Missing granularity"}, + {nil, &invalidOpenstackMetricMetadaTestData[6], &openstackMetricAuthMetadataTestData[0], 6, "s6-Missing threshold"}, + {nil, &invalidOpenstackMetricMetadaTestData[7], &openstackMetricAuthMetadataTestData[0], 7, "s7-Missing threshold"}, + {nil, &invalidOpenstackMetricMetadaTestData[8], &openstackMetricAuthMetadataTestData[0], 8, "s8-Missing threshold"}, } for _, testData := range testCases { testData := testData t.Run(testData.name, func(pt *testing.T) { - _, err := parseOpenstackMetricMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata}) + _, err := parseOpenstackMetricMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata, ScalerIndex: testData.scalerIndex}) assert.NotNil(t, err) }) } @@ -147,18 +147,18 @@ func TestOpenstackMetricsGetMetricsForSpecScalingInvalidMetaData(t *testing.T) { func TestOpenstackMetricAuthenticationInvalidAuthMetadata(t *testing.T) { testCases := []openstackMetricScalerMetricIdentifier{ - {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[0], "Missing userID"}, - {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[1], "Missing password"}, - {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[2], "Missing authURL"}, - {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[3], "Missing appCredentialID and appCredentialSecret"}, - {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[4], "Missing appCredentialSecret"}, - {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[5], "Missing authURL - application 
credential"}, + {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[0], 0, "s0-Missing userID"}, + {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[1], 1, "s1-Missing password"}, + {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[2], 2, "s2-Missing authURL"}, + {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[3], 3, "s3-Missing appCredentialID and appCredentialSecret"}, + {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[4], 4, "s4-Missing appCredentialSecret"}, + {nil, &opentsackMetricMetadataTestData[0], &invalidOpenstackMetricAuthMetadataTestData[5], 5, "s5-Missing authURL - application credential"}, } for _, testData := range testCases { testData := testData t.Run(testData.name, func(ptr *testing.T) { - _, err := parseOpenstackMetricAuthenticationMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata}) + _, err := parseOpenstackMetricAuthenticationMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata, ScalerIndex: testData.scalerIndex}) assert.NotNil(t, err) }) } diff --git a/pkg/scalers/openstack_swift_scaler.go b/pkg/scalers/openstack_swift_scaler.go index 52a5e61c7e0..d1e965ccf2a 100644 --- a/pkg/scalers/openstack_swift_scaler.go +++ b/pkg/scalers/openstack_swift_scaler.go @@ -39,6 +39,7 @@ type openstackSwiftMetadata struct { objectLimit string httpClientTimeout int onlyFiles bool + scalerIndex int } type openstackSwiftAuthenticationMetadata struct { @@ -299,7 +300,7 @@ func parseOpenstackSwiftMetadata(config *ScalerConfig) (*openstackSwiftMetadata, } else { meta.objectLimit = defaultObjectLimit } - + meta.scalerIndex = config.ScalerIndex return &meta, nil 
} @@ -391,9 +392,11 @@ func (s *openstackSwiftScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { metricName = s.metadata.containerName } + metricName = kedautil.NormalizeString(fmt.Sprintf("%s-%s", "openstack-swift", metricName)) + externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s", "openstack-swift", metricName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/openstack_swift_scaler_test.go b/pkg/scalers/openstack_swift_scaler_test.go index 5bb98df9109..1167d6ea650 100644 --- a/pkg/scalers/openstack_swift_scaler_test.go +++ b/pkg/scalers/openstack_swift_scaler_test.go @@ -19,6 +19,7 @@ type openstackSwiftMetricIdentifier struct { resolvedEnv map[string]string metadataTestData *parseOpenstackSwiftMetadataTestData authMetadataTestData *parseOpenstackSwiftAuthMetadataTestData + scaledIndex int name string } @@ -79,30 +80,30 @@ var invalidOpenstackSwiftAuthMetadataTestData = []parseOpenstackSwiftAuthMetadat func TestOpenstackSwiftGetMetricSpecForScaling(t *testing.T) { testCases := []openstackSwiftMetricIdentifier{ - {nil, &openstackSwiftMetadataTestData[0], &openstackSwiftAuthMetadataTestData[0], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[1], &openstackSwiftAuthMetadataTestData[0], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[2], &openstackSwiftAuthMetadataTestData[0], "openstack-swift-my-container-my-prefix"}, - {nil, &openstackSwiftMetadataTestData[3], &openstackSwiftAuthMetadataTestData[0], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[4], &openstackSwiftAuthMetadataTestData[0], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[5], &openstackSwiftAuthMetadataTestData[0], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[6], 
&openstackSwiftAuthMetadataTestData[0], "openstack-swift-my-container"}, - - {nil, &openstackSwiftMetadataTestData[0], &openstackSwiftAuthMetadataTestData[1], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[1], &openstackSwiftAuthMetadataTestData[1], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[2], &openstackSwiftAuthMetadataTestData[1], "openstack-swift-my-container-my-prefix"}, - {nil, &openstackSwiftMetadataTestData[3], &openstackSwiftAuthMetadataTestData[1], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[4], &openstackSwiftAuthMetadataTestData[1], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[5], &openstackSwiftAuthMetadataTestData[1], "openstack-swift-my-container"}, - {nil, &openstackSwiftMetadataTestData[6], &openstackSwiftAuthMetadataTestData[1], "openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[0], &openstackSwiftAuthMetadataTestData[0], 0, "s0-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[1], &openstackSwiftAuthMetadataTestData[0], 1, "s1-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[2], &openstackSwiftAuthMetadataTestData[0], 2, "s2-openstack-swift-my-container-my-prefix"}, + {nil, &openstackSwiftMetadataTestData[3], &openstackSwiftAuthMetadataTestData[0], 3, "s3-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[4], &openstackSwiftAuthMetadataTestData[0], 4, "s4-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[5], &openstackSwiftAuthMetadataTestData[0], 5, "s5-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[6], &openstackSwiftAuthMetadataTestData[0], 6, "s6-openstack-swift-my-container"}, + + {nil, &openstackSwiftMetadataTestData[0], &openstackSwiftAuthMetadataTestData[1], 0, "s0-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[1], &openstackSwiftAuthMetadataTestData[1], 1, 
"s1-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[2], &openstackSwiftAuthMetadataTestData[1], 2, "s2-openstack-swift-my-container-my-prefix"}, + {nil, &openstackSwiftMetadataTestData[3], &openstackSwiftAuthMetadataTestData[1], 3, "s3-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[4], &openstackSwiftAuthMetadataTestData[1], 4, "s4-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[5], &openstackSwiftAuthMetadataTestData[1], 5, "s5-openstack-swift-my-container"}, + {nil, &openstackSwiftMetadataTestData[6], &openstackSwiftAuthMetadataTestData[1], 6, "s6-openstack-swift-my-container"}, } for _, testData := range testCases { testData := testData - meta, err := parseOpenstackSwiftMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata}) + meta, err := parseOpenstackSwiftMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata, ScalerIndex: testData.scaledIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } - _, err = parseOpenstackSwiftAuthenticationMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata}) + _, err = parseOpenstackSwiftAuthenticationMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata, ScalerIndex: testData.scaledIndex}) if err != nil { t.Fatal("Could not parse auth metadata:", err) } @@ -121,16 +122,16 @@ func TestOpenstackSwiftGetMetricSpecForScaling(t *testing.T) { func TestParseOpenstackSwiftMetadataForInvalidCases(t *testing.T) { testCases := []openstackSwiftMetricIdentifier{ - {nil, &invalidOpenstackSwiftMetadataTestData[0], 
&parseOpenstackSwiftAuthMetadataTestData{}, "missing containerName"}, - {nil, &invalidOpenstackSwiftMetadataTestData[1], &parseOpenstackSwiftAuthMetadataTestData{}, "objectCount is not an integer value"}, - {nil, &invalidOpenstackSwiftMetadataTestData[2], &parseOpenstackSwiftAuthMetadataTestData{}, "onlyFiles is not a boolean value"}, - {nil, &invalidOpenstackSwiftMetadataTestData[3], &parseOpenstackSwiftAuthMetadataTestData{}, "timeout is not an integer value"}, + {nil, &invalidOpenstackSwiftMetadataTestData[0], &parseOpenstackSwiftAuthMetadataTestData{}, 0, "s0-missing containerName"}, + {nil, &invalidOpenstackSwiftMetadataTestData[1], &parseOpenstackSwiftAuthMetadataTestData{}, 1, "s1-objectCount is not an integer value"}, + {nil, &invalidOpenstackSwiftMetadataTestData[2], &parseOpenstackSwiftAuthMetadataTestData{}, 2, "s2-onlyFiles is not a boolean value"}, + {nil, &invalidOpenstackSwiftMetadataTestData[3], &parseOpenstackSwiftAuthMetadataTestData{}, 3, "s3-timeout is not an integer value"}, } for _, testData := range testCases { testData := testData t.Run(testData.name, func(pt *testing.T) { - _, err := parseOpenstackSwiftMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata}) + _, err := parseOpenstackSwiftMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata, ScalerIndex: testData.scaledIndex}) assert.NotNil(t, err) }) } @@ -138,19 +139,19 @@ func TestParseOpenstackSwiftMetadataForInvalidCases(t *testing.T) { func TestParseOpenstackSwiftAuthenticationMetadataForInvalidCases(t *testing.T) { testCases := []openstackSwiftMetricIdentifier{ - {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[0], "missing userID"}, - {nil, &parseOpenstackSwiftMetadataTestData{}, 
&invalidOpenstackSwiftAuthMetadataTestData[1], "missing password"}, - {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[2], "missing projectID"}, - {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[3], "missing authURL for password method"}, - {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[4], "missing appCredentialID"}, - {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[5], "missing appCredentialSecret"}, - {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[6], "missing authURL for application credentials method"}, + {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[0], 0, "s0-missing userID"}, + {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[1], 1, "s1-missing password"}, + {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[2], 2, "s2-missing projectID"}, + {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[3], 3, "s3-missing authURL for password method"}, + {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[4], 4, "s4-missing appCredentialID"}, + {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[5], 5, "s5-missing appCredentialSecret"}, + {nil, &parseOpenstackSwiftMetadataTestData{}, &invalidOpenstackSwiftAuthMetadataTestData[6], 6, "s6-missing authURL for application credentials method"}, } for _, testData := range testCases { testData := testData t.Run(testData.name, func(pt *testing.T) { - _, err := parseOpenstackSwiftAuthenticationMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata}) + _, err := 
parseOpenstackSwiftAuthenticationMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authMetadataTestData.authMetadata, ScalerIndex: testData.scaledIndex}) assert.NotNil(t, err) }) } diff --git a/pkg/scalers/postgresql_scaler.go b/pkg/scalers/postgresql_scaler.go index 5dc20b6e247..223b5d569e4 100644 --- a/pkg/scalers/postgresql_scaler.go +++ b/pkg/scalers/postgresql_scaler.go @@ -34,6 +34,7 @@ type postgreSQLMetadata struct { dbName string sslmode string metricName string + scalerIndex int } var postgreSQLLog = logf.Log.WithName("postgreSQL_scaler") @@ -128,7 +129,7 @@ func parsePostgreSQLMetadata(config *ScalerConfig) (*postgreSQLMetadata, error) meta.metricName = kedautil.NormalizeString(fmt.Sprintf("postgresql-%s", meta.dbName)) } } - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -196,7 +197,7 @@ func (s *postgreSQLScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: s.metadata.metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/postgresql_scaler_test.go b/pkg/scalers/postgresql_scaler_test.go index 388c6858596..a86d2517786 100644 --- a/pkg/scalers/postgresql_scaler_test.go +++ b/pkg/scalers/postgresql_scaler_test.go @@ -12,6 +12,7 @@ type postgreSQLMetricIdentifier struct { metadataTestData *parsePostgreSQLMetadataTestData resolvedEnv map[string]string authParam map[string]string + scaleIndex int name string } @@ -31,17 +32,17 @@ var testPostgreSQLMetdata = []parsePostgreSQLMetadataTestData{ } var postgreSQLMetricIdentifiers = []postgreSQLMetricIdentifier{ - {&testPostgreSQLMetdata[0], map[string]string{"test_connection_string": "postgresql://localhost:5432"}, nil, "postgresql-postgresql---localhost-5432"}, - 
{&testPostgreSQLMetdata[1], map[string]string{"test_connection_string2": "postgresql://test@localhost"}, nil, "postgresql-postgresql---test@localhost"}, - {&testPostgreSQLMetdata[2], nil, map[string]string{"connection": "postgresql://user:password@localhost:5432/dbname"}, "postgresql-postgresql---user-xxx@localhost-5432-dbname"}, - {&testPostgreSQLMetdata[3], nil, map[string]string{"connection": "postgresql://Username123:secret@localhost"}, "postgresql-scaler_sql_data2"}, - {&testPostgreSQLMetdata[4], nil, map[string]string{"connection": "postgresql://user:password@localhost:5432/dbname?app_name=test"}, "postgresql-postgresql---user-xxx@localhost-5432-dbname?app_name=test"}, - {&testPostgreSQLMetdata[5], nil, map[string]string{"connection": "postgresql://Username123:secret@localhost"}, "postgresql-scaler_sql_data"}, + {&testPostgreSQLMetdata[0], map[string]string{"test_connection_string": "postgresql://localhost:5432"}, nil, 0, "s0-postgresql-postgresql---localhost-5432"}, + {&testPostgreSQLMetdata[1], map[string]string{"test_connection_string2": "postgresql://test@localhost"}, nil, 1, "s1-postgresql-postgresql---test@localhost"}, + {&testPostgreSQLMetdata[2], nil, map[string]string{"connection": "postgresql://user:password@localhost:5432/dbname"}, 2, "s2-postgresql-postgresql---user-xxx@localhost-5432-dbname"}, + {&testPostgreSQLMetdata[3], nil, map[string]string{"connection": "postgresql://Username123:secret@localhost"}, 3, "s3-postgresql-scaler_sql_data2"}, + {&testPostgreSQLMetdata[4], nil, map[string]string{"connection": "postgresql://user:password@localhost:5432/dbname?app_name=test"}, 4, "s4-postgresql-postgresql---user-xxx@localhost-5432-dbname?app_name=test"}, + {&testPostgreSQLMetdata[5], nil, map[string]string{"connection": "postgresql://Username123:secret@localhost"}, 5, "s5-postgresql-scaler_sql_data"}, } func TestPosgresSQLGetMetricSpecForScaling(t *testing.T) { for _, testData := range postgreSQLMetricIdentifiers { - meta, err := 
parsePostgreSQLMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authParam}) + meta, err := parsePostgreSQLMetadata(&ScalerConfig{ResolvedEnv: testData.resolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.authParam, ScalerIndex: testData.scaleIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/prometheus_scaler.go b/pkg/scalers/prometheus_scaler.go index 97835edcd50..1a0d9e45a0a 100644 --- a/pkg/scalers/prometheus_scaler.go +++ b/pkg/scalers/prometheus_scaler.go @@ -55,6 +55,8 @@ type prometheusMetadata struct { cert string key string ca string + + scalerIndex int } type promQueryResult struct { @@ -125,6 +127,8 @@ func parsePrometheusMetadata(config *ScalerConfig) (*prometheusMetadata, error) meta.threshold = t } + meta.scalerIndex = config.ScalerIndex + authModes, ok := config.TriggerMetadata["authModes"] // no authMode specified if !ok { @@ -198,9 +202,10 @@ func (s *prometheusScaler) Close() error { func (s *prometheusScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(int64(s.metadata.threshold), resource.DecimalSI) + metricName := kedautil.NormalizeString(fmt.Sprintf("%s-%s", "prometheus", s.metadata.metricName)) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s", "prometheus", s.metadata.metricName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/prometheus_scaler_test.go b/pkg/scalers/prometheus_scaler_test.go index 0bba2bcea44..48a0c481dfa 100644 --- a/pkg/scalers/prometheus_scaler_test.go +++ b/pkg/scalers/prometheus_scaler_test.go @@ -13,6 +13,7 @@ type parsePrometheusMetadataTestData struct { type prometheusMetricIdentifier struct { metadataTestData 
*parsePrometheusMetadataTestData + scalerIndex int name string } @@ -33,7 +34,8 @@ var testPromMetadata = []parsePrometheusMetadataTestData{ } var prometheusMetricIdentifiers = []prometheusMetricIdentifier{ - {&testPromMetadata[1], "prometheus-http_requests_total"}, + {&testPromMetadata[1], 0, "s0-prometheus-http-http_requests_total"}, + {&testPromMetadata[1], 1, "s1-prometheus-http-http_requests_total"}, } type prometheusAuthMetadataTestData struct { @@ -79,7 +81,7 @@ func TestPrometheusParseMetadata(t *testing.T) { func TestPrometheusGetMetricSpecForScaling(t *testing.T) { for _, testData := range prometheusMetricIdentifiers { - meta, err := parsePrometheusMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata}) + meta, err := parsePrometheusMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/rabbitmq_scaler.go b/pkg/scalers/rabbitmq_scaler.go index 0fcee476817..81e51c2aa15 100644 --- a/pkg/scalers/rabbitmq_scaler.go +++ b/pkg/scalers/rabbitmq_scaler.go @@ -448,7 +448,6 @@ func (s *rabbitMQScaler) getQueueInfoViaHTTP() (*queueInfo, error) { // GetMetricSpecForScaling returns the MetricSpec for the Horizontal Pod Autoscaler func (s *rabbitMQScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { metricValue := resource.NewQuantity(int64(s.metadata.value), resource.DecimalSI) - externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, s.metadata.metricName), diff --git a/pkg/scalers/redis_scaler.go b/pkg/scalers/redis_scaler.go index 8ae551482bc..27903b87b80 100644 --- a/pkg/scalers/redis_scaler.go +++ b/pkg/scalers/redis_scaler.go @@ -45,6 +45,7 @@ type redisMetadata struct { listName string databaseIndex int connectionInfo redisConnectionInfo + scalerIndex int } var redisLog = logf.Log.WithName("redis_scaler") @@ 
-170,7 +171,7 @@ func parseRedisMetadata(config *ScalerConfig, parserFn redisAddressParser) (*red } meta.databaseIndex = int(dbIndex) } - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -193,9 +194,10 @@ func (s *redisScaler) Close() error { // GetMetricSpecForScaling returns the metric spec for the HPA func (s *redisScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetListLengthQty := resource.NewQuantity(int64(s.metadata.targetListLength), resource.DecimalSI) + metricName := kedautil.NormalizeString(fmt.Sprintf("%s-%s", "redis", s.metadata.listName)) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s", "redis", s.metadata.listName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/redis_scaler_test.go b/pkg/scalers/redis_scaler_test.go index aacae87a03e..a3b1cd23420 100644 --- a/pkg/scalers/redis_scaler_test.go +++ b/pkg/scalers/redis_scaler_test.go @@ -21,6 +21,7 @@ type parseRedisMetadataTestData struct { type redisMetricIdentifier struct { metadataTestData *parseRedisMetadataTestData + scalerIndex int name string } @@ -51,7 +52,8 @@ var testRedisMetadata = []parseRedisMetadataTestData{ {map[string]string{"listName": "mylist", "listLength": "0"}, true, map[string]string{"host": "localhost"}}} var redisMetricIdentifiers = []redisMetricIdentifier{ - {&testRedisMetadata[1], "redis-mylist"}, + {&testRedisMetadata[1], 0, "s0-redis-mylist"}, + {&testRedisMetadata[1], 1, "s1-redis-mylist"}, } func TestRedisParseMetadata(t *testing.T) { @@ -70,7 +72,7 @@ func TestRedisParseMetadata(t *testing.T) { func TestRedisGetMetricSpecForScaling(t *testing.T) { for _, testData := range redisMetricIdentifiers { - meta, err := parseRedisMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testRedisResolvedEnv, 
AuthParams: testData.metadataTestData.authParams}, parseRedisAddress) + meta, err := parseRedisMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: testRedisResolvedEnv, AuthParams: testData.metadataTestData.authParams, ScalerIndex: testData.scalerIndex}, parseRedisAddress) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/redis_streams_scaler.go b/pkg/scalers/redis_streams_scaler.go index 930effb3082..3183088b1a2 100644 --- a/pkg/scalers/redis_streams_scaler.go +++ b/pkg/scalers/redis_streams_scaler.go @@ -41,6 +41,7 @@ type redisStreamsMetadata struct { consumerGroupName string databaseIndex int connectionInfo redisConnectionInfo + scalerIndex int } var redisStreamsLog = logf.Log.WithName("redis_streams_scaler") @@ -159,7 +160,7 @@ func parseRedisStreamsMetadata(config *ScalerConfig, parseFn redisAddressParser) } meta.databaseIndex = int(dbIndex) } - + meta.scalerIndex = config.ScalerIndex return &meta, nil } @@ -182,9 +183,10 @@ func (s *redisStreamsScaler) Close() error { // GetMetricSpecForScaling returns the metric spec for the HPA func (s *redisStreamsScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetPendingEntriesCount := resource.NewQuantity(int64(s.metadata.targetPendingEntriesCount), resource.DecimalSI) + metricName := kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "redis-streams", s.metadata.streamName, s.metadata.consumerGroupName)) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "redis-streams", s.metadata.streamName, s.metadata.consumerGroupName)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/redis_streams_scaler_test.go b/pkg/scalers/redis_streams_scaler_test.go index 187d01bf89a..d840494f996 100644 --- a/pkg/scalers/redis_streams_scaler_test.go +++ 
b/pkg/scalers/redis_streams_scaler_test.go @@ -116,6 +116,7 @@ type redisStreamsTestMetadata struct { func TestRedisStreamsGetMetricSpecForScaling(t *testing.T) { type redisStreamsMetricIdentifier struct { metadataTestData *redisStreamsTestMetadata + scalerIndex int name string } @@ -127,11 +128,12 @@ func TestRedisStreamsGetMetricSpecForScaling(t *testing.T) { } var redisStreamMetricIdentifiers = []redisStreamsMetricIdentifier{ - {&redisStreamsTestData[0], "redis-streams-my-stream-my-stream-consumer-group"}, + {&redisStreamsTestData[0], 0, "s0-redis-streams-my-stream-my-stream-consumer-group"}, + {&redisStreamsTestData[0], 1, "s1-redis-streams-my-stream-my-stream-consumer-group"}, } for _, testData := range redisStreamMetricIdentifiers { - meta, err := parseRedisStreamsMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: map[string]string{"REDIS_SERVICE": "my-address"}, AuthParams: testData.metadataTestData.authParams}, parseRedisAddress) + meta, err := parseRedisStreamsMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, ResolvedEnv: map[string]string{"REDIS_SERVICE": "my-address"}, AuthParams: testData.metadataTestData.authParams, ScalerIndex: testData.scalerIndex}, parseRedisAddress) if err != nil { t.Fatal("Could not parse metadata:", err) } diff --git a/pkg/scalers/selenium_grid_scaler.go b/pkg/scalers/selenium_grid_scaler.go index dd8a92f9e7f..ced7eaaa0c5 100644 --- a/pkg/scalers/selenium_grid_scaler.go +++ b/pkg/scalers/selenium_grid_scaler.go @@ -30,6 +30,7 @@ type seleniumGridScalerMetadata struct { browserName string targetValue int64 browserVersion string + scalerIndex int } type seleniumResponse struct { @@ -128,7 +129,7 @@ func (s *seleniumGridScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { metricName := kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", "seleniumgrid", s.metadata.url, s.metadata.browserName, s.metadata.browserVersion)) externalMetric := 
&v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: metricName, + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/solace_scaler.go b/pkg/scalers/solace_scaler.go index 34b77c69b9a..b634b2e1de8 100644 --- a/pkg/scalers/solace_scaler.go +++ b/pkg/scalers/solace_scaler.go @@ -75,6 +75,8 @@ type SolaceMetadata struct { // Target Message Count msgCountTarget int msgSpoolUsageTarget int // Spool Use Target in Megabytes + // Scaler index + scalerIndex int } // SEMP API Response Root Struct @@ -239,9 +241,10 @@ func (s *SolaceScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { // Message Count Target Spec if s.metadata.msgCountTarget > 0 { targetMetricValue := resource.NewQuantity(int64(s.metadata.msgCountTarget), resource.DecimalSI) + metricName := kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", solaceScalerID, s.metadata.messageVpn, s.metadata.queueName, solaceTriggermsgcount)) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", solaceScalerID, s.metadata.messageVpn, s.metadata.queueName, solaceTriggermsgcount)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, @@ -254,9 +257,10 @@ func (s *SolaceScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { // Message Spool Usage Target Spec if s.metadata.msgSpoolUsageTarget > 0 { targetMetricValue := resource.NewQuantity(int64(s.metadata.msgSpoolUsageTarget), resource.DecimalSI) + metricName := kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", solaceScalerID, s.metadata.messageVpn, s.metadata.queueName, solaceTriggermsgspoolusage)) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", solaceScalerID, 
s.metadata.messageVpn, s.metadata.queueName, solaceTriggermsgspoolusage)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/solace_scaler_test.go b/pkg/scalers/solace_scaler_test.go index 613f917f8f5..fbd270e2247 100644 --- a/pkg/scalers/solace_scaler_test.go +++ b/pkg/scalers/solace_scaler_test.go @@ -9,9 +9,10 @@ import ( ) type testSolaceMetadata struct { - testID string - metadata map[string]string - isError bool + testID string + metadata map[string]string + scalerIndex int + isError bool } var ( @@ -43,6 +44,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ // Empty { "#001 - EMPTY", map[string]string{}, + 0, true, }, // +Case - brokerBaseUrl @@ -59,6 +61,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, + 1, false, }, // -Case - missing username (clear) @@ -74,6 +77,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, + 1, true, }, // -Case - missing password (clear) @@ -89,6 +93,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, + 0, true, }, // -Case - missing queue @@ -104,6 +109,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaQueueName: "", solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, + 1, true, }, // -Case - missing msgCountTarget @@ -120,6 +126,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaMsgCountTarget: "", solaceMetaMsgSpoolUsageTarget: "", }, + 0, true, }, // -Case - msgSpoolUsageTarget non-numeric @@ -135,6 +142,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: "NOT_AN_INTEGER", }, 
+ 1, true, }, // -Case - msgSpoolUsage non-numeric @@ -150,6 +158,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgSpoolUsageTarget: "NOT_AN_INTEGER", }, + 0, true, }, // +Case - Pass with msgSpoolUsageTarget and not msgCountTarget @@ -165,6 +174,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgSpoolUsageTarget: soltestValidMsgSpoolTarget, }, + 1, false, }, } @@ -183,6 +193,7 @@ var testSolaceEnvCreds = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, + 0, false, }, // -Case - Should fail with ENV var not found @@ -198,6 +209,7 @@ var testSolaceEnvCreds = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, + 1, true, }, } @@ -218,6 +230,7 @@ var testSolaceK8sSecretCreds = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, + 1, false, }, // +Case - should find creds @@ -233,6 +246,7 @@ var testSolaceK8sSecretCreds = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, + 0, false, }, // +Case - Should find with creds @@ -248,6 +262,7 @@ var testSolaceK8sSecretCreds = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, + 1, false, }, } @@ -266,6 +281,7 @@ var testSolaceGetMetricSpecData = []testSolaceMetadata{ solaceMetaMsgCountTarget: soltestValidMsgCountTarget, // solaceMetaMsgSpoolUsageTarget: soltestValidMsgSpoolTarget, }, + 0, false, }, { @@ -281,6 +297,7 @@ var testSolaceGetMetricSpecData = []testSolaceMetadata{ // solaceMetaMsgCountTarget: soltestValidMsgCountTarget, solaceMetaMsgSpoolUsageTarget: soltestValidMsgSpoolTarget, }, + 1, false, }, { @@ -296,6 +313,7 @@ var 
testSolaceGetMetricSpecData = []testSolaceMetadata{ solaceMetaMsgCountTarget: soltestValidMsgCountTarget, solaceMetaMsgSpoolUsageTarget: soltestValidMsgSpoolTarget, }, + 0, false, }, { @@ -311,6 +329,7 @@ var testSolaceGetMetricSpecData = []testSolaceMetadata{ // solaceMetaMsgCountTarget: soltestValidMsgCountTarget, // solaceMetaMsgSpoolUsageTarget: soltestValidMsgSpoolTarget, }, + 1, true, }, { @@ -326,6 +345,7 @@ var testSolaceGetMetricSpecData = []testSolaceMetadata{ solaceMetaMsgCountTarget: "0", solaceMetaMsgSpoolUsageTarget: "0", }, + 0, true, }, { @@ -341,19 +361,22 @@ var testSolaceGetMetricSpecData = []testSolaceMetadata{ solaceMetaMsgCountTarget: "0", solaceMetaMsgSpoolUsageTarget: soltestValidMsgSpoolTarget, }, + 0, false, }, } var testSolaceExpectedMetricNames = map[string]string{ - solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgcount: "", - solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgspoolusage: "", + "s0-" + solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgcount: "", + "s0-" + solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgspoolusage: "", + "s1-" + solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgcount: "", + "s1-" + solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgspoolusage: "", } func TestSolaceParseSolaceMetadata(t *testing.T) { for _, testData := range testParseSolaceMetadata { fmt.Print(testData.testID) - _, err := parseSolaceMetadata(&ScalerConfig{ResolvedEnv: nil, TriggerMetadata: testData.metadata, AuthParams: nil}) + _, err := parseSolaceMetadata(&ScalerConfig{ResolvedEnv: nil, TriggerMetadata: testData.metadata, AuthParams: nil, ScalerIndex: testData.scalerIndex}) switch { case err != nil && !testData.isError: t.Error("expected success but got error: ", err) @@ -367,7 
+390,7 @@ func TestSolaceParseSolaceMetadata(t *testing.T) { } for _, testData := range testSolaceEnvCreds { fmt.Print(testData.testID) - _, err := parseSolaceMetadata(&ScalerConfig{ResolvedEnv: testDataSolaceResolvedEnvVALID, TriggerMetadata: testData.metadata, AuthParams: nil}) + _, err := parseSolaceMetadata(&ScalerConfig{ResolvedEnv: testDataSolaceResolvedEnvVALID, TriggerMetadata: testData.metadata, AuthParams: nil, ScalerIndex: testData.scalerIndex}) switch { case err != nil && !testData.isError: t.Error("expected success but got error: ", err) @@ -381,7 +404,7 @@ func TestSolaceParseSolaceMetadata(t *testing.T) { } for _, testData := range testSolaceK8sSecretCreds { fmt.Print(testData.testID) - _, err := parseSolaceMetadata(&ScalerConfig{ResolvedEnv: nil, TriggerMetadata: testData.metadata, AuthParams: testDataSolaceAuthParamsVALID}) + _, err := parseSolaceMetadata(&ScalerConfig{ResolvedEnv: nil, TriggerMetadata: testData.metadata, AuthParams: testDataSolaceAuthParamsVALID, ScalerIndex: testData.scalerIndex}) switch { case err != nil && !testData.isError: t.Error("expected success but got error: ", err) @@ -401,7 +424,7 @@ func TestSolaceGetMetricSpec(t *testing.T) { fmt.Print(testData.testID) var err error var solaceMeta *SolaceMetadata - solaceMeta, err = parseSolaceMetadata(&ScalerConfig{ResolvedEnv: testDataSolaceResolvedEnvVALID, TriggerMetadata: testData.metadata, AuthParams: testDataSolaceAuthParamsVALID}) + solaceMeta, err = parseSolaceMetadata(&ScalerConfig{ResolvedEnv: testDataSolaceResolvedEnvVALID, TriggerMetadata: testData.metadata, AuthParams: testDataSolaceAuthParamsVALID, ScalerIndex: testData.scalerIndex}) if err != nil { fmt.Printf("\n Failed to parse metadata: %v", err) } else { diff --git a/pkg/scalers/stan_scaler.go b/pkg/scalers/stan_scaler.go index 64d521af751..fe21fe41d75 100644 --- a/pkg/scalers/stan_scaler.go +++ b/pkg/scalers/stan_scaler.go @@ -50,6 +50,7 @@ type stanMetadata struct { durableName string subject string lagThreshold 
int64 + scalerIndex int } const ( @@ -106,6 +107,7 @@ func parseStanMetadata(config *ScalerConfig) (stanMetadata, error) { meta.lagThreshold = t } + meta.scalerIndex = config.ScalerIndex return meta, nil } @@ -196,9 +198,10 @@ func (s *stanScaler) hasPendingMessage() bool { func (s *stanScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { targetMetricValue := resource.NewQuantity(s.metadata.lagThreshold, resource.DecimalSI) + metricName := kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", "stan", s.metadata.queueGroup, s.metadata.durableName, s.metadata.subject)) externalMetric := &v2beta2.ExternalMetricSource{ Metric: v2beta2.MetricIdentifier{ - Name: kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s-%s", "stan", s.metadata.queueGroup, s.metadata.durableName, s.metadata.subject)), + Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, metricName), }, Target: v2beta2.MetricTarget{ Type: v2beta2.AverageValueMetricType, diff --git a/pkg/scalers/stan_scaler_test.go b/pkg/scalers/stan_scaler_test.go index d1915d005d3..a842d6233fd 100644 --- a/pkg/scalers/stan_scaler_test.go +++ b/pkg/scalers/stan_scaler_test.go @@ -12,6 +12,7 @@ type parseStanMetadataTestData struct { type stanMetricIdentifier struct { metadataTestData *parseStanMetadataTestData + scalerIndex int name string } @@ -29,7 +30,8 @@ var testStanMetadata = []parseStanMetadataTestData{ } var stanMetricIdentifiers = []stanMetricIdentifier{ - {&testStanMetadata[4], "stan-grp1-ImDurable-mySubject"}, + {&testStanMetadata[4], 0, "s0-stan-grp1-ImDurable-mySubject"}, + {&testStanMetadata[4], 1, "s1-stan-grp1-ImDurable-mySubject"}, } func TestStanParseMetadata(t *testing.T) { @@ -46,7 +48,7 @@ func TestStanParseMetadata(t *testing.T) { func TestStanGetMetricSpecForScaling(t *testing.T) { for _, testData := range stanMetricIdentifiers { - meta, err := parseStanMetadata(&ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata}) + meta, err := parseStanMetadata(&ScalerConfig{TriggerMetadata: 
testData.metadataTestData.metadata, ScalerIndex: testData.scalerIndex}) if err != nil { t.Fatal("Could not parse metadata:", err) } From 2182f762f457a10ea94057ab9f87ad77110b932d Mon Sep 17 00:00:00 2001 From: Jorge Turrado Date: Sun, 10 Oct 2021 17:47:21 +0200 Subject: [PATCH 03/11] Fix test Signed-off-by: Jorge Turrado --- controllers/keda/scaledobject_controller_test.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/controllers/keda/scaledobject_controller_test.go b/controllers/keda/scaledobject_controller_test.go index c69b5ed61e2..d3ca4e5da84 100644 --- a/controllers/keda/scaledobject_controller_test.go +++ b/controllers/keda/scaledobject_controller_test.go @@ -245,8 +245,8 @@ var _ = Describe("ScaledObjectController", func() { return k8sClient.Get(context.Background(), types.NamespacedName{Name: "keda-hpa-clean-up-test", Namespace: "default"}, hpa) }).ShouldNot(HaveOccurred()) Expect(hpa.Spec.Metrics).To(HaveLen(2)) - Expect(hpa.Spec.Metrics[0].External.Metric.Name).To(Equal("cron-UTC-0xxxx-1xxxx")) - Expect(hpa.Spec.Metrics[1].External.Metric.Name).To(Equal("cron-UTC-2xxxx-3xxxx")) + Expect(hpa.Spec.Metrics[0].External.Metric.Name).To(Equal("s0-cron-UTC-0xxxx-1xxxx")) + Expect(hpa.Spec.Metrics[1].External.Metric.Name).To(Equal("s1-cron-UTC-2xxxx-3xxxx")) // Remove the second trigger. Eventually(func() error { @@ -263,7 +263,7 @@ var _ = Describe("ScaledObjectController", func() { return len(hpa.Spec.Metrics) }).Should(Equal(1)) // And it should only be the first one left. 
- Expect(hpa.Spec.Metrics[0].External.Metric.Name).To(Equal("cron-UTC-0xxxx-1xxxx")) + Expect(hpa.Spec.Metrics[0].External.Metric.Name).To(Equal("s0-cron-UTC-0xxxx-1xxxx")) }) It("deploys ScaledObject and creates HPA, when IdleReplicaCount, MinReplicaCount and MaxReplicaCount is defined", func() { From 51fea821950369f73d487aa0ddf4f5225f156f22 Mon Sep 17 00:00:00 2001 From: Jorge Turrado Date: Sun, 10 Oct 2021 17:49:29 +0200 Subject: [PATCH 04/11] Update CHANGELOG Signed-off-by: Jorge Turrado --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1687cf72be..e206be7b04c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -41,8 +41,9 @@ - Add support to get connection data from Trigger Authorization in MSSQL Scaler ([#2112](https://github.com/kedacore/keda/pull/2112)) - Add support to get connection data from Trigger Authorization in PostgreSQL Scaler ([#2114](https://github.com/kedacore/keda/pull/2114)) - Add support to provide the metric name in Azure Log Analytics Scaler ([#2106](https://github.com/kedacore/keda/pull/2106)) - Add `pageSize` (using regex) in RabbitMQ Scaler ([#2162](https://github.com/kedacore/keda/pull/2162)) - Add `unsafeSsl` parameter in InfluxDB scaler ([#2157](https://github.com/kedacore/keda/pull/2157)) +- Improve metric name creation to be unique using scaler index inside the scaler ([#2161](https://github.com/kedacore/keda/pull/2161)) ### Breaking Changes From 99037a7d61932c655904260f5666d3f4c407bfe8 Mon Sep 17 00:00:00 2001 From: jorturfer Date: Sun, 10 Oct 2021 21:30:43 +0000 Subject: [PATCH 05/11] Add missing changes Signed-off-by: jorturfer --- pkg/scalers/selenium_grid_scaler.go | 1 + pkg/scalers/solace_scaler.go | 3 +++ pkg/scalers/solace_scaler_test.go | 22 ++++++++++------------ 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/pkg/scalers/selenium_grid_scaler.go b/pkg/scalers/selenium_grid_scaler.go index 
ced7eaaa0c5..2084ae8448d 100644 --- a/pkg/scalers/selenium_grid_scaler.go +++ b/pkg/scalers/selenium_grid_scaler.go @@ -101,6 +101,7 @@ func parseSeleniumGridScalerMetadata(config *ScalerConfig) (*seleniumGridScalerM meta.browserVersion = DefaultBrowserVersion } + meta.scalerIndex = config.ScalerIndex return &meta, nil } diff --git a/pkg/scalers/solace_scaler.go b/pkg/scalers/solace_scaler.go index b634b2e1de8..393bff7df78 100644 --- a/pkg/scalers/solace_scaler.go +++ b/pkg/scalers/solace_scaler.go @@ -187,6 +187,9 @@ func parseSolaceMetadata(config *ScalerConfig) (*SolaceMetadata, error) { if meta.username, meta.password, e = getSolaceSempCredentials(config); e != nil { return nil, e } + + meta.scalerIndex = config.ScalerIndex + return &meta, nil } diff --git a/pkg/scalers/solace_scaler_test.go b/pkg/scalers/solace_scaler_test.go index fbd270e2247..553c51b18eb 100644 --- a/pkg/scalers/solace_scaler_test.go +++ b/pkg/scalers/solace_scaler_test.go @@ -44,7 +44,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ // Empty { "#001 - EMPTY", map[string]string{}, - 0, + 1, true, }, // +Case - brokerBaseUrl @@ -93,7 +93,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, - 0, + 1, true, }, // -Case - missing queue @@ -126,7 +126,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaMsgCountTarget: "", solaceMetaMsgSpoolUsageTarget: "", }, - 0, + 1, true, }, // -Case - msgSpoolUsageTarget non-numeric @@ -158,7 +158,7 @@ var testParseSolaceMetadata = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgSpoolUsageTarget: "NOT_AN_INTEGER", }, - 0, + 1, true, }, // +Case - Pass with msgSpoolUsageTarget and not msgCountTarget @@ -193,7 +193,7 @@ var testSolaceEnvCreds = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, - 0, + 1, false, }, // -Case - Should 
fail with ENV var not found @@ -246,7 +246,7 @@ var testSolaceK8sSecretCreds = []testSolaceMetadata{ solaceMetaQueueName: soltestValidQueueName, solaceMetaMsgCountTarget: soltestValidMsgCountTarget, }, - 0, + 1, false, }, // +Case - Should find with creds @@ -281,7 +281,7 @@ var testSolaceGetMetricSpecData = []testSolaceMetadata{ solaceMetaMsgCountTarget: soltestValidMsgCountTarget, // solaceMetaMsgSpoolUsageTarget: soltestValidMsgSpoolTarget, }, - 0, + 1, false, }, { @@ -313,7 +313,7 @@ var testSolaceGetMetricSpecData = []testSolaceMetadata{ solaceMetaMsgCountTarget: soltestValidMsgCountTarget, solaceMetaMsgSpoolUsageTarget: soltestValidMsgSpoolTarget, }, - 0, + 1, false, }, { @@ -345,7 +345,7 @@ var testSolaceGetMetricSpecData = []testSolaceMetadata{ solaceMetaMsgCountTarget: "0", solaceMetaMsgSpoolUsageTarget: "0", }, - 0, + 1, true, }, { @@ -361,14 +361,12 @@ var testSolaceGetMetricSpecData = []testSolaceMetadata{ solaceMetaMsgCountTarget: "0", solaceMetaMsgSpoolUsageTarget: soltestValidMsgSpoolTarget, }, - 0, + 1, false, }, } var testSolaceExpectedMetricNames = map[string]string{ - "s0-" + solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgcount: "", - "s0-" + solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgspoolusage: "", "s1-" + solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgcount: "", "s1-" + solaceScalerID + "-" + soltestValidVpn + "-" + soltestValidQueueName + "-" + solaceTriggermsgspoolusage: "", } From 1dd8d2080bd21c436b78a67c9fdd3aa8d3900df5 Mon Sep 17 00:00:00 2001 From: jorturfer Date: Sun, 10 Oct 2021 22:53:42 +0000 Subject: [PATCH 06/11] Document the new convention about how to create metric names inside scalers Signed-off-by: jorturfer --- CREATE-NEW-SCALER.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CREATE-NEW-SCALER.md b/CREATE-NEW-SCALER.md index b3eb1841bf6..10387066019 100644 --- 
a/CREATE-NEW-SCALER.md +++ b/CREATE-NEW-SCALER.md @@ -44,6 +44,11 @@ The return type of this function is `MetricSpec`, but in KEDA's case we will mos - `TargetValue`: is the value of the metric we want to reach at all times at all costs. As long as the current metric doesn't match TargetValue, HPA will increase the number of the pods until it reaches the maximum number of pods allowed to scale to. - `TargetAverageValue`: the value of the metric for which we require one pod to handle. e.g. if we are have a scaler based on the length of a message queue, and we specificy 10 for `TargetAverageValue`, we are saying that each pod will handle 10 messages. So if the length of the queue becomes 30, we expect that we have 3 pods in our cluster. (`TargetAverage` and `TargetValue` are mutually exclusive) +>**Note:** All scalers receive a parameter named `scalerIndex` as part of `ScalerConfig`. This value is the index of the current scaler inside the current ScaledObject. All metric names have to start with `sX-` (where `X` is scalerIndex). This convention makes the metric unique inside the ScaledObject and brings the option to have more than 1 "similar metric name" inside the same ScaledObject. +For example: +>- s0-redis-mylist +>- s1-redis-mylist + ### IsActive For some reason, the scaler might need to declare itself as in-active, and the way it can do this is through implementing the function `IsActive`. 
From b1391fece77703a8cd2fb8842804fb7a7629ef5b Mon Sep 17 00:00:00 2001 From: jorturfer Date: Mon, 11 Oct 2021 14:46:06 +0000 Subject: [PATCH 07/11] Add a reference to GenerateMetricNameWithIndex() Signed-off-by: jorturfer --- CREATE-NEW-SCALER.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CREATE-NEW-SCALER.md b/CREATE-NEW-SCALER.md index 10387066019..29e12bdc1aa 100644 --- a/CREATE-NEW-SCALER.md +++ b/CREATE-NEW-SCALER.md @@ -44,7 +44,8 @@ The return type of this function is `MetricSpec`, but in KEDA's case we will mos - `TargetValue`: is the value of the metric we want to reach at all times at all costs. As long as the current metric doesn't match TargetValue, HPA will increase the number of the pods until it reaches the maximum number of pods allowed to scale to. - `TargetAverageValue`: the value of the metric for which we require one pod to handle. e.g. if we are have a scaler based on the length of a message queue, and we specificy 10 for `TargetAverageValue`, we are saying that each pod will handle 10 messages. So if the length of the queue becomes 30, we expect that we have 3 pods in our cluster. (`TargetAverage` and `TargetValue` are mutually exclusive) ->**Note:** All scalers receive a parameter named `scalerIndex` as part of `ScalerConfig`. This value is the index of the current scaler inside the current ScaledObject. All metric names have to start with `sX-` (where `X` is scalerIndex). This convention makes the metric unique inside the ScaledObject and brings the option to have more than 1 "similar metric name" inside the same ScaledObject. +>**Note:** All scalers receive a parameter named `scalerIndex` as part of `ScalerConfig`. This value is the index of the current scaler inside the current ScaledObject. All metric names have to start with `sX-` (where `X` is scalerIndex). This convention makes the metric unique inside the ScaledObject and brings the option to have more than 1 "similar metric name" inside the same ScaledObject. 
+There is a naming helper function, `GenerateMetricNameWithIndex(scalerIndex int, metricName string)`, that receives the current index and the original metric name (without the prefix) and returns the concatenated string using the convention (we recommend its utilization). For example: >- s0-redis-mylist >- s1-redis-mylist From 2ab64a60241d5fc9a330caa3c7bdf170481579ea Mon Sep 17 00:00:00 2001 From: jorturfer Date: Mon, 11 Oct 2021 15:15:47 +0000 Subject: [PATCH 08/11] Refactor a bit the doc Signed-off-by: jorturfer --- CREATE-NEW-SCALER.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/CREATE-NEW-SCALER.md b/CREATE-NEW-SCALER.md index 29e12bdc1aa..760148d0e0e 100644 --- a/CREATE-NEW-SCALER.md +++ b/CREATE-NEW-SCALER.md @@ -44,11 +44,14 @@ The return type of this function is `MetricSpec`, but in KEDA's case we will mos - `TargetValue`: is the value of the metric we want to reach at all times at all costs. As long as the current metric doesn't match TargetValue, HPA will increase the number of the pods until it reaches the maximum number of pods allowed to scale to. - `TargetAverageValue`: the value of the metric for which we require one pod to handle. e.g. if we are have a scaler based on the length of a message queue, and we specificy 10 for `TargetAverageValue`, we are saying that each pod will handle 10 messages. So if the length of the queue becomes 30, we expect that we have 3 pods in our cluster. (`TargetAverage` and `TargetValue` are mutually exclusive) ->**Note:** All scalers receive a parameter named `scalerIndex` as part of `ScalerConfig`. This value is the index of the current scaler inside the current ScaledObject. All metric names have to start with `sX-` (where `X` is scalerIndex). This convention makes the metric unique inside the ScaledObject and brings the option to have more than 1 "similar metric name" inside the same ScaledObject. 
-There is a naming helper function, `GenerateMetricNameWithIndex(scalerIndex int, metricName string)`, that receives the current index and the original metric name (without the prefix) and returns the concatenated string using the convention (we recommend its utilization). +All scalers receive a parameter named `scalerIndex` as part of `ScalerConfig`. This value is the index of the current scaler in a ScaledObject. All metric names have to start with `sX-` (where `X` is `scalerIndex`). This convention makes the metric name unique in the ScaledObject and brings the option to have more than 1 "similar metric name" defined in a ScaledObject. + For example: ->- s0-redis-mylist ->- s1-redis-mylist +- s0-redis-mylist +- s1-redis-mylist + +>**Note:** There is a naming helper function `GenerateMetricNameWithIndex(scalerIndex int, metricName string)`, that receives the current index and the original metric name (without the prefix) and returns the concatenated string using the convention (please use this function). + ### IsActive From 71375ecf2bef675f498347406734b3536cbdd9a0 Mon Sep 17 00:00:00 2001 From: Jorge Turrado Date: Wed, 13 Oct 2021 11:03:41 +0200 Subject: [PATCH 09/11] Add example about how to use GenerateMetricNameWithIndex() Signed-off-by: Jorge Turrado --- CREATE-NEW-SCALER.md | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/CREATE-NEW-SCALER.md b/CREATE-NEW-SCALER.md index 760148d0e0e..06526b15adf 100644 --- a/CREATE-NEW-SCALER.md +++ b/CREATE-NEW-SCALER.md @@ -50,7 +50,23 @@ For example: - s0-redis-mylist - s1-redis-mylist ->**Note:** There is a naming helper function `GenerateMetricNameWithIndex(scalerIndex int, metricName string)`, that receives the current index and the original metric name (without the prefix) and returns the concatenated string using the convention (please use this function). 
+>**Note:** There is a naming helper function `GenerateMetricNameWithIndex(scalerIndex int, metricName string)`, that receives the current index and the original metric name (without the prefix) and returns the concatenated string using the convention (please use this function).
Next lines are an example about how to use it: +>```golang +>func (s *artemisScaler) GetMetricSpecForScaling() []v2beta2.MetricSpec { +> targetMetricValue := resource.NewQuantity(int64(s.metadata.queueLength), resource.DecimalSI) +> externalMetric := &v2beta2.ExternalMetricSource{ +> Metric: v2beta2.MetricIdentifier{ +> Name: GenerateMetricNameWithIndex(s.metadata.scalerIndex, kedautil.NormalizeString(fmt.Sprintf("%s-%s-%s", "artemis", s.metadata.brokerName, s.metadata.queueName))), +> }, +> Target: v2beta2.MetricTarget{ +> Type: v2beta2.AverageValueMetricType, +> AverageValue: targetMetricValue, +> }, +> } +> metricSpec := v2beta2.MetricSpec{External: externalMetric, Type: artemisMetricType} +> return []v2beta2.MetricSpec{metricSpec} +>} +>``` ### IsActive From 0cac030a5efdc00e43ca1a39cdd94874aec90c45 Mon Sep 17 00:00:00 2001 From: Jorge Turrado Date: Wed, 13 Oct 2021 11:19:52 +0200 Subject: [PATCH 10/11] Fix merging typo Signed-off-by: Jorge Turrado --- CHANGELOG.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e206be7b04c..57202856280 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -41,12 +41,9 @@ - Add support to get connection data from Trigger Authorization in MSSQL Scaler ([#2112](https://github.com/kedacore/keda/pull/2112)) - Add support to get connection data from Trigger Authorization in PostgreSQL Scaler ([#2114](https://github.com/kedacore/keda/pull/2114)) - Add support to provide the metric name in Azure Log Analytics Scaler ([#2106](https://github.com/kedacore/keda/pull/2106)) -<<<<<<< HEAD - Add `pageSize` (using regex) in RabbitMQ Scaler ([#2162](https://github.com/kedacore/keda/pull/2162)) - Add `unsafeSsl` parameter in InfluxDB scaler ([#2157](https://github.com/kedacore/keda/pull/2157)) -======= - Improve metric name creation to be unique using scaler index inside the scaler ([#2161](https://github.com/kedacore/keda/pull/2161)) ->>>>>>> 1298324 (Update CHANGELOG) ### Breaking Changes From 
aaff2f096ca9ea1a96942dc72c860fd258c0002c Mon Sep 17 00:00:00 2001 From: Jorge Turrado Date: Wed, 13 Oct 2021 11:26:51 +0200 Subject: [PATCH 11/11] Fix merging typo in prometheus_scalers Signed-off-by: Jorge Turrado --- pkg/scalers/prometheus_scaler_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/scalers/prometheus_scaler_test.go b/pkg/scalers/prometheus_scaler_test.go index 48a0c481dfa..6f641db7eae 100644 --- a/pkg/scalers/prometheus_scaler_test.go +++ b/pkg/scalers/prometheus_scaler_test.go @@ -34,8 +34,8 @@ var testPromMetadata = []parsePrometheusMetadataTestData{ } var prometheusMetricIdentifiers = []prometheusMetricIdentifier{ - {&testPromMetadata[1], 0, "s0-prometheus-http-http_requests_total"}, - {&testPromMetadata[1], 1, "s1-prometheus-http-http_requests_total"}, + {&testPromMetadata[1], 0, "s0-prometheus-http_requests_total"}, + {&testPromMetadata[1], 1, "s1-prometheus-http_requests_total"}, } type prometheusAuthMetadataTestData struct {