Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Support for autoscale for HDInsight clusters #8104

Closed
wants to merge 30 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
dbb754f
Capacity autoscale support
kosinsky Jul 27, 2020
4b51802
Recurrence based autoscale
kosinsky Jul 28, 2020
e43803b
dependencies: updating to v44.2.0 of github.com/Azure/azure-sdk-for-go
kosinsky Jul 28, 2020
2dccb67
Merge branch 'update-sdk-to-v44.2.0' into autoscale
kosinsky Jul 28, 2020
33cd8ab
Support for update for Autoscale
kosinsky Aug 6, 2020
ee6df81
disable_correlation_request_id=true to avoid broken clusters
kosinsky Aug 10, 2020
5b8ce1b
Autoscale for Spark clusters
kosinsky Aug 11, 2020
6f12a0a
Autoscale for HBase clusters
kosinsky Aug 11, 2020
fa8d2ca
Autoscale for Interactive Query cluster
kosinsky Aug 12, 2020
9a3eab9
Documentation changes
kosinsky Aug 12, 2020
2f6c5f3
target_instance_count in schedule
kosinsky Aug 12, 2020
78495ef
Formatting
kosinsky Aug 12, 2020
fa6ae35
Merge remote-tracking branch 'upstream/master' into autoscale
kosinsky Aug 12, 2020
43aa254
Merge remote-tracking branch 'upstream/master' into autoscale
kosinsky Aug 25, 2020
0b8a67a
Post merge fix
kosinsky Aug 25, 2020
64a450f
Tune comment about HDI limitation
kosinsky Aug 25, 2020
5dbedfe
Refactor FlattenHDInsightAutoscaleRecurrenceDefinition
kosinsky Sep 22, 2020
f04ff1c
Move HDI client workaround to the NewClient method
kosinsky Sep 23, 2020
2e98e39
Update website/docs/r/hdinsight_interactive_query_cluster.html.markdown
kosinsky Sep 23, 2020
47c9e7b
Update website/docs/r/hdinsight_spark_cluster.html.markdown
kosinsky Sep 23, 2020
a183c1e
Update website/docs/r/hdinsight_spark_cluster.html.markdown
kosinsky Sep 23, 2020
5b26af7
Sort instance counts in the doc
kosinsky Sep 23, 2020
6172e49
Update website/docs/r/hdinsight_hadoop_cluster.html.markdown
kosinsky Sep 23, 2020
8506785
Update website/docs/r/hdinsight_hadoop_cluster.html.markdown
kosinsky Sep 23, 2020
6fe972d
Update website/docs/r/hdinsight_hadoop_cluster.html.markdown
kosinsky Sep 23, 2020
8a1be26
Update website/docs/r/hdinsight_hbase_cluster.html.markdown
kosinsky Sep 23, 2020
9eba86e
Update website/docs/r/hdinsight_interactive_query_cluster.html.markdown
kosinsky Sep 23, 2020
ea53ccd
recurrence is required for HBase
kosinsky Sep 23, 2020
f369fd8
Merge branch 'autoscale' of github.com:kosinsky/terraform-provider-az…
kosinsky Sep 23, 2020
5a61b42
Sorted attributes alphabetically
kosinsky Sep 23, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 12 additions & 8 deletions azurerm/internal/services/hdinsight/client/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,21 @@ type Client struct {
}

func NewClient(o *common.ClientOptions) *Client {
ApplicationsClient := hdinsight.NewApplicationsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
o.ConfigureClient(&ApplicationsClient.Client, o.ResourceManagerAuthorizer)
// due to a bug in the HDInsight API we can't reuse client with the same x-ms-correlation-request-id for multiple updates
opts := *o
opts.DisableCorrelationRequestID = true

ClustersClient := hdinsight.NewClustersClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
o.ConfigureClient(&ClustersClient.Client, o.ResourceManagerAuthorizer)
ApplicationsClient := hdinsight.NewApplicationsClientWithBaseURI(opts.ResourceManagerEndpoint, opts.SubscriptionId)
opts.ConfigureClient(&ApplicationsClient.Client, opts.ResourceManagerAuthorizer)

ConfigurationsClient := hdinsight.NewConfigurationsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
o.ConfigureClient(&ConfigurationsClient.Client, o.ResourceManagerAuthorizer)
ClustersClient := hdinsight.NewClustersClientWithBaseURI(opts.ResourceManagerEndpoint, opts.SubscriptionId)
opts.ConfigureClient(&ClustersClient.Client, opts.ResourceManagerAuthorizer)

ExtensionsClient := hdinsight.NewExtensionsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
o.ConfigureClient(&ExtensionsClient.Client, o.ResourceManagerAuthorizer)
ConfigurationsClient := hdinsight.NewConfigurationsClientWithBaseURI(opts.ResourceManagerEndpoint, opts.SubscriptionId)
opts.ConfigureClient(&ConfigurationsClient.Client, opts.ResourceManagerAuthorizer)

ExtensionsClient := hdinsight.NewExtensionsClientWithBaseURI(opts.ResourceManagerEndpoint, opts.SubscriptionId)
opts.ConfigureClient(&ExtensionsClient.Client, opts.ResourceManagerAuthorizer)

return &Client{
ApplicationsClient: &ApplicationsClient,
Expand Down
37 changes: 28 additions & 9 deletions azurerm/internal/services/hdinsight/common_hdinsight.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,18 +47,37 @@ func hdinsightClusterUpdate(clusterKind string, readFunc schema.ReadFunc) schema
roles := rolesRaw[0].(map[string]interface{})
workerNodes := roles["worker_node"].([]interface{})
workerNode := workerNodes[0].(map[string]interface{})
targetInstanceCount := workerNode["target_instance_count"].(int)
params := hdinsight.ClusterResizeParameters{
TargetInstanceCount: utils.Int32(int32(targetInstanceCount)),
}
if d.HasChange("roles.0.worker_node.0.target_instance_count") {
targetInstanceCount := workerNode["target_instance_count"].(int)
params := hdinsight.ClusterResizeParameters{
TargetInstanceCount: utils.Int32(int32(targetInstanceCount)),
}

future, err := client.Resize(ctx, resourceGroup, name, params)
if err != nil {
return fmt.Errorf("Error resizing the HDInsight %q Cluster %q (Resource Group %q): %+v", clusterKind, name, resourceGroup, err)
future, err := client.Resize(ctx, resourceGroup, name, params)
if err != nil {
return fmt.Errorf("Error resizing the HDInsight %q Cluster %q (Resource Group %q): %+v", clusterKind, name, resourceGroup, err)
}

if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
return fmt.Errorf("Error waiting for the HDInsight %q Cluster %q (Resource Group %q) to finish resizing: %+v", clusterKind, name, resourceGroup, err)
}
}

if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
return fmt.Errorf("Error waiting for the HDInsight %q Cluster %q (Resource Group %q) to finish resizing: %+v", clusterKind, name, resourceGroup, err)
if d.HasChange("roles.0.worker_node.0.autoscale") {
autoscale := ExpandHDInsightNodeAutoScaleDefinition(workerNode["autoscale"].([]interface{}))
params := hdinsight.AutoscaleConfigurationUpdateParameter{
Autoscale: autoscale,
}

future, err := client.UpdateAutoScaleConfiguration(ctx, resourceGroup, name, params)

if err != nil {
return fmt.Errorf("Error changing autoscale of the HDInsight %q Cluster %q (Resource Group %q): %+v", clusterKind, name, resourceGroup, err)
}

if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
return fmt.Errorf("Error waiting for changing autoscale of the HDInsight %q Cluster %q (Resource Group %q) to finish resizing: %+v", clusterKind, name, resourceGroup, err)
}
}
}

Expand Down
270 changes: 270 additions & 0 deletions azurerm/internal/services/hdinsight/hdinsight.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package hdinsight

import (
"fmt"
"regexp"
"strings"

"github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight"
Expand Down Expand Up @@ -584,6 +585,8 @@ type HDInsightNodeDefinition struct {
MaxNumberOfDisksPerNode *int
FixedMinInstanceCount *int32
FixedTargetInstanceCount *int32
CanAutoScaleByCapacity bool
CanAutoScaleOnSchedule bool
}

func ValidateSchemaHDInsightNodeDefinitionVMSize() schema.SchemaValidateFunc {
Expand Down Expand Up @@ -743,6 +746,108 @@ func SchemaHDInsightNodeDefinition(schemaLocation string, definition HDInsightNo
Required: true,
ValidateFunc: countValidation,
}

if definition.CanAutoScaleByCapacity || definition.CanAutoScaleOnSchedule {
autoScales := map[string]*schema.Schema{}

if definition.CanAutoScaleByCapacity {
autoScales["capacity"] = &schema.Schema{
Type: schema.TypeList,
Optional: true,
MaxItems: 1,
ConflictsWith: []string{
fmt.Sprintf("%s.0.autoscale.0.recurrence", schemaLocation),
},
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"min_instance_count": {
Type: schema.TypeInt,
Required: true,
ValidateFunc: countValidation,
},
"max_instance_count": {
Type: schema.TypeInt,
Required: true,
ValidateFunc: countValidation,
},
},
},
}
if definition.CanAutoScaleOnSchedule {
autoScales["capacity"].ConflictsWith = []string{
fmt.Sprintf("%s.0.autoscale.0.recurrence", schemaLocation),
}
}
}
if definition.CanAutoScaleOnSchedule {
autoScales["recurrence"] = &schema.Schema{
Type: schema.TypeList,
Optional: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"timezone": {
Type: schema.TypeString,
Required: true,
},
"schedule": {
Type: schema.TypeList,
Required: true,
MinItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"time": {
Type: schema.TypeString,
Required: true,
ValidateFunc: validation.StringMatch(
regexp.MustCompile("^([01][0-9]|[2][0-3]):([03][0])$"), // time must be on the hour or half past
"Time of day must match the format HH:mm where HH is 00-23 and mm is 00 or 30",
),
},
"days": {
Type: schema.TypeList,
Required: true,
Elem: &schema.Schema{
Type: schema.TypeString,
ValidateFunc: validation.StringInSlice([]string{
string(hdinsight.Monday),
string(hdinsight.Tuesday),
string(hdinsight.Wednesday),
string(hdinsight.Thursday),
string(hdinsight.Friday),
string(hdinsight.Saturday),
string(hdinsight.Sunday),
}, false),
},
},

"target_instance_count": {
Type: schema.TypeInt,
Required: true,
ValidateFunc: countValidation,
},
},
},
},
},
},
}
if definition.CanAutoScaleByCapacity {
autoScales["recurrence"].ConflictsWith = []string{
fmt.Sprintf("%s.0.autoscale.0.capacity", schemaLocation),
}
}
}

result["autoscale"] = &schema.Schema{
Type: schema.TypeList,
Optional: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: autoScales,
},
}
}
}

if definition.CanSpecifyDisks {
Expand Down Expand Up @@ -823,6 +928,14 @@ func ExpandHDInsightNodeDefinition(name string, input []interface{}, definition

targetInstanceCount := v["target_instance_count"].(int)
role.TargetInstanceCount = utils.Int32(int32(targetInstanceCount))

if definition.CanAutoScaleByCapacity || definition.CanAutoScaleOnSchedule {
autoscaleRaw := v["autoscale"].([]interface{})
autoscale := ExpandHDInsightNodeAutoScaleDefinition(autoscaleRaw)
if autoscale != nil {
role.AutoscaleConfiguration = autoscale
}
}
} else {
role.MinInstanceCount = definition.FixedMinInstanceCount
role.TargetInstanceCount = definition.FixedTargetInstanceCount
Expand All @@ -842,6 +955,87 @@ func ExpandHDInsightNodeDefinition(name string, input []interface{}, definition
return &role, nil
}

// ExpandHDInsightNodeAutoScaleDefinition converts the Terraform `autoscale`
// block into the SDK's Autoscale model. Capacity-based autoscale takes
// precedence over recurrence-based autoscale; nil is returned when the block
// is absent or when neither sub-block is set.
func ExpandHDInsightNodeAutoScaleDefinition(input []interface{}) *hdinsight.Autoscale {
	if len(input) == 0 {
		return nil
	}

	raw := input[0].(map[string]interface{})

	if capacityRaw, ok := raw["capacity"]; ok && capacityRaw != nil {
		if capacity := ExpandHDInsightAutoscaleCapacityDefinition(capacityRaw.([]interface{})); capacity != nil {
			return &hdinsight.Autoscale{
				Capacity: capacity,
			}
		}
	}

	if recurrenceRaw, ok := raw["recurrence"]; ok && recurrenceRaw != nil {
		if recurrence := ExpandHDInsightAutoscaleRecurrenceDefinition(recurrenceRaw.([]interface{})); recurrence != nil {
			return &hdinsight.Autoscale{
				Recurrence: recurrence,
			}
		}
	}

	return nil
}

// ExpandHDInsightAutoscaleCapacityDefinition maps the Terraform `capacity`
// block onto the SDK's AutoscaleCapacity model, or returns nil when the
// block is absent.
func ExpandHDInsightAutoscaleCapacityDefinition(input []interface{}) *hdinsight.AutoscaleCapacity {
	if len(input) == 0 {
		return nil
	}

	raw := input[0].(map[string]interface{})
	minInstanceCount := int32(raw["min_instance_count"].(int))
	maxInstanceCount := int32(raw["max_instance_count"].(int))

	return &hdinsight.AutoscaleCapacity{
		MinInstanceCount: utils.Int32(minInstanceCount),
		MaxInstanceCount: utils.Int32(maxInstanceCount),
	}
}

// ExpandHDInsightAutoscaleRecurrenceDefinition maps the Terraform
// `recurrence` block onto the SDK's AutoscaleRecurrence model, or returns
// nil when the block is absent.
func ExpandHDInsightAutoscaleRecurrenceDefinition(input []interface{}) *hdinsight.AutoscaleRecurrence {
	if len(input) == 0 {
		return nil
	}

	raw := input[0].(map[string]interface{})

	scheduleRaw := raw["schedule"].([]interface{})
	schedules := make([]hdinsight.AutoscaleSchedule, 0, len(scheduleRaw))
	for _, item := range scheduleRaw {
		entry := item.(map[string]interface{})

		daysRaw := entry["days"].([]interface{})
		days := make([]hdinsight.DaysOfWeek, len(daysRaw))
		for i, day := range daysRaw {
			days[i] = hdinsight.DaysOfWeek(day.(string))
		}

		targetCount := int32(entry["target_instance_count"].(int))
		schedules = append(schedules, hdinsight.AutoscaleSchedule{
			Days: &days,
			TimeAndCapacity: &hdinsight.AutoscaleTimeAndCapacity{
				Time: utils.String(entry["time"].(string)),
				// The SDK exposes both min and max, but the service always
				// overrides max to equal min, so a single target count feeds both.
				MinInstanceCount: utils.Int32(targetCount),
				MaxInstanceCount: utils.Int32(targetCount),
			},
		})
	}

	return &hdinsight.AutoscaleRecurrence{
		TimeZone: utils.String(raw["timezone"].(string)),
		Schedule: &schedules,
	}
}

func FlattenHDInsightNodeDefinition(input *hdinsight.Role, existing []interface{}, definition HDInsightNodeDefinition) []interface{} {
if input == nil {
return []interface{}{}
Expand Down Expand Up @@ -901,6 +1095,13 @@ func FlattenHDInsightNodeDefinition(input *hdinsight.Role, existing []interface{
if input.TargetInstanceCount != nil {
output["target_instance_count"] = int(*input.TargetInstanceCount)
}

if definition.CanAutoScaleByCapacity || definition.CanAutoScaleOnSchedule {
autoscale := FlattenHDInsightNodeAutoscaleDefinition(input.AutoscaleConfiguration)
if autoscale != nil {
output["autoscale"] = autoscale
}
}
}

if definition.CanSpecifyDisks {
Expand Down Expand Up @@ -952,3 +1153,72 @@ func FindHDInsightConnectivityEndpoint(name string, input *[]hdinsight.Connectiv

return ""
}

// FlattenHDInsightNodeAutoscaleDefinition converts the SDK's Autoscale model
// back into the Terraform `autoscale` block representation. It returns nil
// when no autoscale configuration is present (either a nil input or one with
// neither capacity nor recurrence populated).
func FlattenHDInsightNodeAutoscaleDefinition(input *hdinsight.Autoscale) []interface{} {
	if input == nil {
		return nil
	}

	block := make(map[string]interface{})

	if capacity := input.Capacity; capacity != nil {
		block["capacity"] = FlattenHDInsightAutoscaleCapacityDefinition(capacity)
	}

	if recurrence := input.Recurrence; recurrence != nil {
		block["recurrence"] = FlattenHDInsightAutoscaleRecurrenceDefinition(recurrence)
	}

	if len(block) == 0 {
		return nil
	}

	return []interface{}{block}
}

// FlattenHDInsightAutoscaleCapacityDefinition converts the SDK's
// AutoscaleCapacity model back into the Terraform `capacity` block.
//
// The counts are dereferenced to plain ints (guarding against nil pointers
// from the API) rather than stored as *int32 — the Terraform schema declares
// these fields as TypeInt, so pointer values would be persisted incorrectly
// in state.
func FlattenHDInsightAutoscaleCapacityDefinition(input *hdinsight.AutoscaleCapacity) []interface{} {
	if input == nil {
		return []interface{}{}
	}

	minInstanceCount := 0
	if input.MinInstanceCount != nil {
		minInstanceCount = int(*input.MinInstanceCount)
	}

	maxInstanceCount := 0
	if input.MaxInstanceCount != nil {
		maxInstanceCount = int(*input.MaxInstanceCount)
	}

	return []interface{}{
		map[string]interface{}{
			"min_instance_count": minInstanceCount,
			"max_instance_count": maxInstanceCount,
		},
	}
}

// FlattenHDInsightAutoscaleRecurrenceDefinition converts the SDK's
// AutoscaleRecurrence model back into the Terraform `recurrence` block.
//
// Fixes over the previous version: the Time pointer is nil-checked before
// being dereferenced (the old `*schedule.TimeAndCapacity.Time != ""` would
// panic on a nil Time from the API); TimeZone is dereferenced to a string
// and the schedule slice is stored by value, since the Terraform schema
// cannot represent *string / *[]interface{}; days are converted to strings
// to match the TypeString element schema.
func FlattenHDInsightAutoscaleRecurrenceDefinition(input *hdinsight.AutoscaleRecurrence) []interface{} {
	if input == nil || input.Schedule == nil {
		return []interface{}{}
	}

	schedules := make([]interface{}, 0, len(*input.Schedule))

	for _, schedule := range *input.Schedule {
		days := make([]string, 0)
		if schedule.Days != nil {
			for _, day := range *schedule.Days {
				days = append(days, string(day))
			}
		}

		targetInstanceCount := 0
		time := ""
		if schedule.TimeAndCapacity != nil {
			if schedule.TimeAndCapacity.MinInstanceCount != nil {
				// note: the service keeps min and max equal, so min is the target
				targetInstanceCount = int(*schedule.TimeAndCapacity.MinInstanceCount)
			}
			if schedule.TimeAndCapacity.Time != nil {
				time = *schedule.TimeAndCapacity.Time
			}
		}

		schedules = append(schedules, map[string]interface{}{
			"days":                  days,
			"target_instance_count": targetInstanceCount,
			"time":                  time,
		})
	}

	timeZone := ""
	if input.TimeZone != nil {
		timeZone = *input.TimeZone
	}

	return []interface{}{
		map[string]interface{}{
			"timezone": timeZone,
			"schedule": schedules,
		},
	}
}
Loading