Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

azurerm_data_factory_pipeline - support type for parameters and variables #27092

Open
wants to merge 8 commits into
base: main
Choose a base branch
from
58 changes: 58 additions & 0 deletions internal/services/datafactory/data_factory.go
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,31 @@ func flattenDataFactoryParameters(input map[string]*datafactory.ParameterSpecifi
return output
}

func expandDataFactoryParametersFourPointOh(input []interface{}) map[string]*datafactory.ParameterSpecification {
parameters := make(map[string]*datafactory.ParameterSpecification)
for _, v := range input {
val := v.(map[string]interface{})
parameters[val["name"].(string)] = &datafactory.ParameterSpecification{
Type: datafactory.ParameterType(val["type"].(string)),
DefaultValue: val["default_value"],
stephybun marked this conversation as resolved.
Show resolved Hide resolved
}
}
return parameters
}

// flattenDataFactoryParametersFourPointOh converts the SDK's name-keyed map of
// parameter specifications back into the flat list of blocks used by the 4.0
// schema. NOTE(review): map iteration order is random, so the list order is
// nondeterministic — the schema is expected to tolerate reordering.
func flattenDataFactoryParametersFourPointOh(input map[string]*datafactory.ParameterSpecification) []interface{} {
	output := make([]interface{}, 0, len(input))
	for name, spec := range input {
		output = append(output, map[string]interface{}{
			"name":          name,
			"type":          string(spec.Type),
			"default_value": spec.DefaultValue,
		})
	}
	return output
}

func flattenDataFactoryAnnotations(input *[]interface{}) []string {
annotations := make([]string, 0)
if input == nil {
Expand Down Expand Up @@ -144,6 +169,39 @@ func flattenDataFactoryVariables(input map[string]*datafactory.VariableSpecifica
return output
}

// expandDataFactoryVariablesFourPointOh converts the 4.0-style list of
// variable blocks (each a map carrying "name", "type" and "default_value")
// into the name-keyed map of specifications expected by the Data Factory SDK.
func expandDataFactoryVariablesFourPointOh(input []interface{}) map[string]*datafactory.VariableSpecification {
	output := make(map[string]*datafactory.VariableSpecification, len(input))
	for _, raw := range input {
		block := raw.(map[string]interface{})
		name := block["name"].(string)
		output[name] = &datafactory.VariableSpecification{
			Type:         datafactory.VariableType(block["type"].(string)),
			DefaultValue: block["default_value"],
		}
	}
	return output
}

func flattenDataFactoryVariablesFourPointOh(input map[string]*datafactory.VariableSpecification) []interface{} {
variables := make([]interface{}, 0, len(input))
for k, v := range input {

// convert value to string if it is bool
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this only necessary for variable? How about parameters?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

the api can do the conversion for parameters but not for variables

// this is needed because the API returns the default value as a bool
if _, ok := v.DefaultValue.(bool); ok {
v.DefaultValue = fmt.Sprintf("%v", v.DefaultValue)
}

variable := map[string]interface{}{
"name": k,
"type": string(v.Type),
"default_value": v.DefaultValue,
}
variables = append(variables, variable)
}
return variables
}

// DatasetColumn describes the attributes needed to specify a structure column for a dataset
type DatasetColumn struct {
Name string `json:"name,omitempty" tfschema:"name"`
Expand Down
105 changes: 94 additions & 11 deletions internal/services/datafactory/data_factory_pipeline_resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import (
"github.com/hashicorp/go-azure-sdk/resource-manager/datafactory/2018-06-01/factories"
"github.com/hashicorp/terraform-provider-azurerm/helpers/tf"
"github.com/hashicorp/terraform-provider-azurerm/internal/clients"
"github.com/hashicorp/terraform-provider-azurerm/internal/features"
"github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/azuresdkhacks"
"github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/parse"
"github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/validate"
Expand All @@ -22,7 +23,7 @@ import (
)

func resourceDataFactoryPipeline() *pluginsdk.Resource {
return &pluginsdk.Resource{
resource := &pluginsdk.Resource{
Create: resourceDataFactoryPipelineCreateUpdate,
Read: resourceDataFactoryPipelineRead,
Update: resourceDataFactoryPipelineCreateUpdate,
Expand Down Expand Up @@ -109,6 +110,69 @@ func resourceDataFactoryPipeline() *pluginsdk.Resource {
},
},
}

if features.FourPointOhBeta() {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For future reference, these types of schema changes should use the inverted feature flag e.g.

schema: map[string]*pluginsdk.Schema{
// We put the desired breaking schema changes in here
}

if !features.FourPointOhBeta {
// We patch over the breaking schema change with the old definition here
}

This makes cleaning up old/deprecated code post major release much easier since the only action is to delete the entire block wrapped in the !features.FourPointOhBeta flag instead of having to copy paste the new behaviour into the schema above.

However.. since we're planning to ship 4.0 this week there isn't any need for the flag at all, this is an exceptional week where you can go ahead and make the breaking change directly in the schema and it would be acceptable and fine to merge.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@stephybun thanks for above.
Not directly related to the requested change, but I wanted to ask about the best practice for ensuring ordering is ignored when running terraform plan. For example, I have made parameters a list of objects, and when multiple parameters are declared, running plan shows diffs purely because of their order.
Changing to TypeSet looks like it fixes the problem, but I'm not sure whether I should prefer TypeSet over TypeList here.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@hqhqhqhqhqhqhqhqhqhqhq it seems like the API doesn't preserve the order of the parameters, so we won't have any other choice but to use a Set here

resource.Schema["parameters"] = &pluginsdk.Schema{
Type: pluginsdk.TypeList,
Optional: true,
Elem: &pluginsdk.Resource{
Schema: map[string]*pluginsdk.Schema{
"name": {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Add a validation func here, the name should not be empty.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

added

Type: pluginsdk.TypeString,
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"default_value": {
Type: pluginsdk.TypeString,
Required: true,
},
"type": {
Type: pluginsdk.TypeString,
Optional: true,
Default: string(datafactory.ParameterTypeString),
ValidateFunc: validation.StringInSlice([]string{
string(datafactory.ParameterTypeString),
string(datafactory.ParameterTypeInt),
string(datafactory.ParameterTypeFloat),
string(datafactory.ParameterTypeBool),
string(datafactory.ParameterTypeArray),
string(datafactory.ParameterTypeObject),
string(datafactory.ParameterTypeSecureString),
}, false),
},
},
},
}

resource.Schema["variables"] = &pluginsdk.Schema{
Type: pluginsdk.TypeList,
Optional: true,
Elem: &pluginsdk.Resource{
Schema: map[string]*pluginsdk.Schema{
"name": {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Add a validation func here, the name should not be empty.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

added

Type: pluginsdk.TypeString,
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"default_value": {
Type: pluginsdk.TypeString,
Required: true,
},
"type": {
Type: pluginsdk.TypeString,
Optional: true,
Default: string(datafactory.VariableTypeString),
ValidateFunc: validation.StringInSlice([]string{
string(datafactory.VariableTypeString),
string(datafactory.VariableTypeArray),
}, false),
},
},
},
}
}

return resource
}

func resourceDataFactoryPipelineCreateUpdate(d *pluginsdk.ResourceData, meta interface{}) error {
Expand Down Expand Up @@ -141,11 +205,17 @@ func resourceDataFactoryPipelineCreateUpdate(d *pluginsdk.ResourceData, meta int
}

pipeline := &azuresdkhacks.Pipeline{
Parameters: expandDataFactoryParameters(d.Get("parameters").(map[string]interface{})),
Variables: expandDataFactoryVariables(d.Get("variables").(map[string]interface{})),
Description: utils.String(d.Get("description").(string)),
}

if features.FourPointOhBeta() {
pipeline.Parameters = expandDataFactoryParametersFourPointOh(d.Get("parameters").([]interface{}))
pipeline.Variables = expandDataFactoryVariablesFourPointOh(d.Get("variables").([]interface{}))
} else {
pipeline.Parameters = expandDataFactoryParameters(d.Get("parameters").(map[string]interface{}))
pipeline.Variables = expandDataFactoryVariables(d.Get("variables").(map[string]interface{}))
}

if v, ok := d.GetOk("activities_json"); ok {
activities, err := deserializeDataFactoryPipelineActivities(v.(string))
if err != nil {
Expand Down Expand Up @@ -225,9 +295,27 @@ func resourceDataFactoryPipelineRead(d *pluginsdk.ResourceData, meta interface{}
if props := resp.Pipeline; props != nil {
d.Set("description", props.Description)

parameters := flattenDataFactoryParameters(props.Parameters)
if err := d.Set("parameters", parameters); err != nil {
return fmt.Errorf("setting `parameters`: %+v", err)
if features.FourPointOhBeta() {
parameters := flattenDataFactoryParametersFourPointOh(props.Parameters)
if err := d.Set("parameters", parameters); err != nil {
return fmt.Errorf("setting `parameters`: %+v", err)
}

variables := flattenDataFactoryVariablesFourPointOh(props.Variables)
if err := d.Set("variables", variables); err != nil {
return fmt.Errorf("setting `variables`: %+v", err)
}
} else {
parameters := flattenDataFactoryParameters(props.Parameters)
if err := d.Set("parameters", parameters); err != nil {
return fmt.Errorf("setting `parameters`: %+v", err)
}

variables := flattenDataFactoryVariables(props.Variables)
if err := d.Set("variables", variables); err != nil {
return fmt.Errorf("setting `variables`: %+v", err)
}

}

annotations := flattenDataFactoryAnnotations(props.Annotations)
Expand Down Expand Up @@ -255,11 +343,6 @@ func resourceDataFactoryPipelineRead(d *pluginsdk.ResourceData, meta interface{}
}
}

variables := flattenDataFactoryVariables(props.Variables)
if err := d.Set("variables", variables); err != nil {
return fmt.Errorf("setting `variables`: %+v", err)
}

if activities := props.Activities; activities != nil {
activitiesJson, err := serializeDataFactoryPipelineActivities(activities)
if err != nil {
Expand Down
Loading
Loading