diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/client.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/client.go new file mode 100644 index 000000000000..2272bc797ed8 --- /dev/null +++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/client.go @@ -0,0 +1,52 @@ +// Package streamanalytics implements the Azure ARM Streamanalytics service API version . +// +// Stream Analytics Client +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "github.com/Azure/go-autorest/autorest" +) + +const ( + // DefaultBaseURI is the default URI used for the service Streamanalytics + DefaultBaseURI = "https://management.azure.com" +) + +// BaseClient is the base client for Streamanalytics. +type BaseClient struct { + autorest.Client + BaseURI string + SubscriptionID string +} + +// New creates an instance of the BaseClient client. +func New(subscriptionID string) BaseClient { + return NewWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewWithBaseURI creates an instance of the BaseClient client using a custom endpoint. Use this when interacting with +// an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). 
+func NewWithBaseURI(baseURI string, subscriptionID string) BaseClient { + return BaseClient{ + Client: autorest.NewClientWithUserAgent(UserAgent()), + BaseURI: baseURI, + SubscriptionID: subscriptionID, + } +} diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/clusters.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/clusters.go new file mode 100644 index 000000000000..d55aa49e2cbe --- /dev/null +++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/clusters.go @@ -0,0 +1,785 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// ClustersClient is the stream Analytics Client +type ClustersClient struct { + BaseClient +} + +// NewClustersClient creates an instance of the ClustersClient client. 
+func NewClustersClient(subscriptionID string) ClustersClient { + return NewClustersClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewClustersClientWithBaseURI creates an instance of the ClustersClient client using a custom endpoint. Use this +// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewClustersClientWithBaseURI(baseURI string, subscriptionID string) ClustersClient { + return ClustersClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrUpdate creates a Stream Analytics Cluster or replaces an already existing cluster. +// Parameters: +// cluster - the definition of the cluster that will be used to create a new cluster or replace the existing +// one. +// resourceGroupName - the name of the resource group. The name is case insensitive. +// clusterName - the name of the cluster. +// ifMatch - the ETag of the resource. Omit this value to always overwrite the current record set. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new resource to be created, but to prevent updating an existing record +// set. Other values will result in a 412 Pre-condition Failed response. 
+func (client ClustersClient) CreateOrUpdate(ctx context.Context, cluster Cluster, resourceGroupName string, clusterName string, ifMatch string, ifNoneMatch string) (result ClustersCreateOrUpdateFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.CreateOrUpdate") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: cluster, + Constraints: []validation.Constraint{{Target: "cluster.Sku", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "cluster.Sku.Capacity", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "cluster.Sku.Capacity", Name: validation.InclusiveMaximum, Rule: int64(216), Chain: nil}, + {Target: "cluster.Sku.Capacity", Name: validation.InclusiveMinimum, Rule: int64(36), Chain: nil}, + }}, + }}}}, + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.ClustersClient", "CreateOrUpdate", err.Error()) + } + + req, err := client.CreateOrUpdatePreparer(ctx, cluster, resourceGroupName, clusterName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "CreateOrUpdate", nil, "Failure preparing request") + return + } + + result, err = client.CreateOrUpdateSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, 
"streamanalytics.ClustersClient", "CreateOrUpdate", result.Response(), "Failure sending request") + return + } + + return +} + +// CreateOrUpdatePreparer prepares the CreateOrUpdate request. +func (client ClustersClient) CreateOrUpdatePreparer(ctx context.Context, cluster Cluster, resourceGroupName string, clusterName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "clusterName": autorest.Encode("path", clusterName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2020-03-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + cluster.Etag = nil + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}", pathParameters), + autorest.WithJSON(cluster), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the +// http.Response Body if it receives an error. 
+func (client ClustersClient) CreateOrUpdateSender(req *http.Request) (future ClustersCreateOrUpdateFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always +// closes the http.Response Body. +func (client ClustersClient) CreateOrUpdateResponder(resp *http.Response) (result Cluster, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes the specified cluster. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// clusterName - the name of the cluster. +func (client ClustersClient) Delete(ctx context.Context, resourceGroupName string, clusterName string) (result ClustersDeleteFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.Delete") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, 
validation.NewError("streamanalytics.ClustersClient", "Delete", err.Error()) + } + + req, err := client.DeletePreparer(ctx, resourceGroupName, clusterName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Delete", result.Response(), "Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client ClustersClient) DeletePreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "clusterName": autorest.Encode("path", clusterName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2020-03-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client ClustersClient) DeleteSender(req *http.Request) (future ClustersDeleteFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. 
The method always +// closes the http.Response Body. +func (client ClustersClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets information about the specified cluster. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// clusterName - the name of the cluster. +func (client ClustersClient) Get(ctx context.Context, resourceGroupName string, clusterName string) (result Cluster, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.ClustersClient", "Get", err.Error()) + } + + req, err := client.GetPreparer(ctx, resourceGroupName, clusterName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Get", 
resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client ClustersClient) GetPreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "clusterName": autorest.Encode("path", clusterName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2020-03-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client ClustersClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client ClustersClient) GetResponder(resp *http.Response) (result Cluster, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByResourceGroup lists all of the clusters in the given resource group. 
+// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +func (client ClustersClient) ListByResourceGroup(ctx context.Context, resourceGroupName string) (result ClusterListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListByResourceGroup") + defer func() { + sc := -1 + if result.clr.Response.Response != nil { + sc = result.clr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.ClustersClient", "ListByResourceGroup", err.Error()) + } + + result.fn = client.listByResourceGroupNextResults + req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListByResourceGroup", nil, "Failure preparing request") + return + } + + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.clr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListByResourceGroup", resp, "Failure sending request") + return + } + + result.clr, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListByResourceGroup", resp, "Failure responding to request") + } + if 
result.clr.hasNextLink() && result.clr.IsEmpty() { + err = result.NextWithContext(ctx) + } + + return +} + +// ListByResourceGroupPreparer prepares the ListByResourceGroup request. +func (client ClustersClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2020-03-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the +// http.Response Body if it receives an error. +func (client ClustersClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always +// closes the http.Response Body. +func (client ClustersClient) ListByResourceGroupResponder(resp *http.Response) (result ClusterListResult, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByResourceGroupNextResults retrieves the next set of results, if any. 
+func (client ClustersClient) listByResourceGroupNextResults(ctx context.Context, lastResults ClusterListResult) (result ClusterListResult, err error) { + req, err := lastResults.clusterListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listByResourceGroupNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listByResourceGroupNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listByResourceGroupNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required. +func (client ClustersClient) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string) (result ClusterListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListByResourceGroup") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByResourceGroup(ctx, resourceGroupName) + return +} + +// ListBySubscription lists all of the clusters in the given subscription. 
+func (client ClustersClient) ListBySubscription(ctx context.Context) (result ClusterListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListBySubscription") + defer func() { + sc := -1 + if result.clr.Response.Response != nil { + sc = result.clr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.ClustersClient", "ListBySubscription", err.Error()) + } + + result.fn = client.listBySubscriptionNextResults + req, err := client.ListBySubscriptionPreparer(ctx) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListBySubscription", nil, "Failure preparing request") + return + } + + resp, err := client.ListBySubscriptionSender(req) + if err != nil { + result.clr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListBySubscription", resp, "Failure sending request") + return + } + + result.clr, err = client.ListBySubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListBySubscription", resp, "Failure responding to request") + } + if result.clr.hasNextLink() && result.clr.IsEmpty() { + err = result.NextWithContext(ctx) + } + + return +} + +// ListBySubscriptionPreparer prepares the ListBySubscription request. 
+func (client ClustersClient) ListBySubscriptionPreparer(ctx context.Context) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2020-03-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListBySubscriptionSender sends the ListBySubscription request. The method will close the +// http.Response Body if it receives an error. +func (client ClustersClient) ListBySubscriptionSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListBySubscriptionResponder handles the response to the ListBySubscription request. The method always +// closes the http.Response Body. +func (client ClustersClient) ListBySubscriptionResponder(resp *http.Response) (result ClusterListResult, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listBySubscriptionNextResults retrieves the next set of results, if any. 
+func (client ClustersClient) listBySubscriptionNextResults(ctx context.Context, lastResults ClusterListResult) (result ClusterListResult, err error) { + req, err := lastResults.clusterListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listBySubscriptionNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListBySubscriptionSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listBySubscriptionNextResults", resp, "Failure sending next results request") + } + result, err = client.ListBySubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listBySubscriptionNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListBySubscriptionComplete enumerates all values, automatically crossing page boundaries as required. +func (client ClustersClient) ListBySubscriptionComplete(ctx context.Context) (result ClusterListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListBySubscription") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListBySubscription(ctx) + return +} + +// ListStreamingJobs lists all of the streaming jobs in the given cluster. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// clusterName - the name of the cluster. 
+func (client ClustersClient) ListStreamingJobs(ctx context.Context, resourceGroupName string, clusterName string) (result ClusterJobListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListStreamingJobs") + defer func() { + sc := -1 + if result.cjlr.Response.Response != nil { + sc = result.cjlr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.ClustersClient", "ListStreamingJobs", err.Error()) + } + + result.fn = client.listStreamingJobsNextResults + req, err := client.ListStreamingJobsPreparer(ctx, resourceGroupName, clusterName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListStreamingJobs", nil, "Failure preparing request") + return + } + + resp, err := client.ListStreamingJobsSender(req) + if err != nil { + result.cjlr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListStreamingJobs", resp, "Failure sending request") + return + } + + result.cjlr, err = client.ListStreamingJobsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListStreamingJobs", resp, "Failure responding to request") + } + if result.cjlr.hasNextLink() && result.cjlr.IsEmpty() { + err = result.NextWithContext(ctx) + } + + 
return +} + +// ListStreamingJobsPreparer prepares the ListStreamingJobs request. +func (client ClustersClient) ListStreamingJobsPreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "clusterName": autorest.Encode("path", clusterName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2020-03-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListStreamingJobsSender sends the ListStreamingJobs request. The method will close the +// http.Response Body if it receives an error. +func (client ClustersClient) ListStreamingJobsSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListStreamingJobsResponder handles the response to the ListStreamingJobs request. The method always +// closes the http.Response Body. +func (client ClustersClient) ListStreamingJobsResponder(resp *http.Response) (result ClusterJobListResult, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listStreamingJobsNextResults retrieves the next set of results, if any. 
+func (client ClustersClient) listStreamingJobsNextResults(ctx context.Context, lastResults ClusterJobListResult) (result ClusterJobListResult, err error) { + req, err := lastResults.clusterJobListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listStreamingJobsNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListStreamingJobsSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listStreamingJobsNextResults", resp, "Failure sending next results request") + } + result, err = client.ListStreamingJobsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listStreamingJobsNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListStreamingJobsComplete enumerates all values, automatically crossing page boundaries as required. +func (client ClustersClient) ListStreamingJobsComplete(ctx context.Context, resourceGroupName string, clusterName string) (result ClusterJobListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListStreamingJobs") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListStreamingJobs(ctx, resourceGroupName, clusterName) + return +} + +// Update updates an existing cluster. This can be used to partially update (ie. update one or two properties) a +// cluster without affecting the rest of the cluster definition. +// Parameters: +// cluster - the properties specified here will overwrite the corresponding properties in the existing cluster +// (ie. Those properties will be updated). 
+// resourceGroupName - the name of the resource group. The name is case insensitive. +// clusterName - the name of the cluster. +// ifMatch - the ETag of the resource. Omit this value to always overwrite the current record set. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +func (client ClustersClient) Update(ctx context.Context, cluster Cluster, resourceGroupName string, clusterName string, ifMatch string) (result ClustersUpdateFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.Update") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.ClustersClient", "Update", err.Error()) + } + + req, err := client.UpdatePreparer(ctx, cluster, resourceGroupName, clusterName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Update", nil, "Failure preparing request") + return + } + + result, err = client.UpdateSender(req) // long-running operation: the returned future tracks completion + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Update", result.Response(), "Failure sending request") + return + } + + return +} + +// UpdatePreparer prepares the Update request.
+func (client ClustersClient) UpdatePreparer(ctx context.Context, cluster Cluster, resourceGroupName string, clusterName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "clusterName": autorest.Encode("path", clusterName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2020-03-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + cluster.Etag = nil // do not send Etag in the PATCH body; optimistic concurrency is expressed via the If-Match header below + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}", pathParameters), + autorest.WithJSON(cluster), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client ClustersClient) UpdateSender(req *http.Request) (future ClustersUpdateFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body.
+func (client ClustersClient) UpdateResponder(resp *http.Response) (result Cluster, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), // 200 and 202 are the accepted success statuses for the update + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/enums.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/enums.go new file mode 100644 index 000000000000..21431e206a0f --- /dev/null +++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/enums.go @@ -0,0 +1,437 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +// AuthenticationMode enumerates the values for authentication mode. +type AuthenticationMode string + +const ( + // ConnectionString wire value "ConnectionString". + ConnectionString AuthenticationMode = "ConnectionString" + // Msi wire value "Msi". + Msi AuthenticationMode = "Msi" + // UserToken wire value "UserToken". + UserToken AuthenticationMode = "UserToken" +) + +// PossibleAuthenticationModeValues returns an array of possible values for the AuthenticationMode const type.
+func PossibleAuthenticationModeValues() []AuthenticationMode { + return []AuthenticationMode{ConnectionString, Msi, UserToken} +} + +// BindingType enumerates the values for binding type. +type BindingType string + +const ( + // BindingTypeFunctionRetrieveDefaultDefinitionParameters ... + BindingTypeFunctionRetrieveDefaultDefinitionParameters BindingType = "FunctionRetrieveDefaultDefinitionParameters" + // BindingTypeMicrosoftMachineLearningServices ... + BindingTypeMicrosoftMachineLearningServices BindingType = "Microsoft.MachineLearningServices" + // BindingTypeMicrosoftMachineLearningWebService ... + BindingTypeMicrosoftMachineLearningWebService BindingType = "Microsoft.MachineLearning/WebService" + // BindingTypeMicrosoftStreamAnalyticsCLRUdf ... + BindingTypeMicrosoftStreamAnalyticsCLRUdf BindingType = "Microsoft.StreamAnalytics/CLRUdf" + // BindingTypeMicrosoftStreamAnalyticsJavascriptUdf ... + BindingTypeMicrosoftStreamAnalyticsJavascriptUdf BindingType = "Microsoft.StreamAnalytics/JavascriptUdf" +) + +// PossibleBindingTypeValues returns an array of possible values for the BindingType const type. +func PossibleBindingTypeValues() []BindingType { + return []BindingType{BindingTypeFunctionRetrieveDefaultDefinitionParameters, BindingTypeMicrosoftMachineLearningServices, BindingTypeMicrosoftMachineLearningWebService, BindingTypeMicrosoftStreamAnalyticsCLRUdf, BindingTypeMicrosoftStreamAnalyticsJavascriptUdf} +} + +// ClusterProvisioningState enumerates the values for cluster provisioning state. +type ClusterProvisioningState string + +const ( + // Canceled The cluster provisioning was canceled. + Canceled ClusterProvisioningState = "Canceled" + // Failed The cluster provisioning failed. + Failed ClusterProvisioningState = "Failed" + // InProgress The cluster provisioning was in progress. + InProgress ClusterProvisioningState = "InProgress" + // Succeeded The cluster provisioning succeeded.
+ Succeeded ClusterProvisioningState = "Succeeded" +) + +// PossibleClusterProvisioningStateValues returns an array of possible values for the ClusterProvisioningState const type. +func PossibleClusterProvisioningStateValues() []ClusterProvisioningState { + return []ClusterProvisioningState{Canceled, Failed, InProgress, Succeeded} +} + +// ClusterSkuName enumerates the values for cluster sku name. +type ClusterSkuName string + +const ( + // Default The default SKU. + Default ClusterSkuName = "Default" +) + +// PossibleClusterSkuNameValues returns an array of possible values for the ClusterSkuName const type. +func PossibleClusterSkuNameValues() []ClusterSkuName { + return []ClusterSkuName{Default} +} + +// CompatibilityLevel enumerates the values for compatibility level. +type CompatibilityLevel string + +const ( + // OneFullStopZero compatibility level "1.0". + OneFullStopZero CompatibilityLevel = "1.0" +) + +// PossibleCompatibilityLevelValues returns an array of possible values for the CompatibilityLevel const type. +func PossibleCompatibilityLevelValues() []CompatibilityLevel { + return []CompatibilityLevel{OneFullStopZero} +} + +// ContentStoragePolicy enumerates the values for content storage policy. +type ContentStoragePolicy string + +const ( + // ContentStoragePolicyJobStorageAccount wire value "JobStorageAccount". + ContentStoragePolicyJobStorageAccount ContentStoragePolicy = "JobStorageAccount" + // ContentStoragePolicySystemAccount wire value "SystemAccount". + ContentStoragePolicySystemAccount ContentStoragePolicy = "SystemAccount" +) + +// PossibleContentStoragePolicyValues returns an array of possible values for the ContentStoragePolicy const type. +func PossibleContentStoragePolicyValues() []ContentStoragePolicy { + return []ContentStoragePolicy{ContentStoragePolicyJobStorageAccount, ContentStoragePolicySystemAccount} +} + +// Encoding enumerates the values for encoding. +type Encoding string + +const ( + // UTF8 wire value "UTF8".
+ UTF8 Encoding = "UTF8" +) + +// PossibleEncodingValues returns an array of possible values for the Encoding const type. +func PossibleEncodingValues() []Encoding { + return []Encoding{UTF8} +} + +// EventSerializationType enumerates the values for event serialization type. +type EventSerializationType string + +const ( + // Avro ... + Avro EventSerializationType = "Avro" + // Csv ... + Csv EventSerializationType = "Csv" + // CustomClr ... + CustomClr EventSerializationType = "CustomClr" + // JSON wire value "Json" (note the mixed-case wire form). + JSON EventSerializationType = "Json" + // Parquet ... + Parquet EventSerializationType = "Parquet" +) + +// PossibleEventSerializationTypeValues returns an array of possible values for the EventSerializationType const type. +func PossibleEventSerializationTypeValues() []EventSerializationType { + return []EventSerializationType{Avro, Csv, CustomClr, JSON, Parquet} +} + +// EventsOutOfOrderPolicy enumerates the values for events out of order policy. +type EventsOutOfOrderPolicy string + +const ( + // Adjust ... + Adjust EventsOutOfOrderPolicy = "Adjust" + // Drop ... + Drop EventsOutOfOrderPolicy = "Drop" +) + +// PossibleEventsOutOfOrderPolicyValues returns an array of possible values for the EventsOutOfOrderPolicy const type. +func PossibleEventsOutOfOrderPolicyValues() []EventsOutOfOrderPolicy { + return []EventsOutOfOrderPolicy{Adjust, Drop} +} + +// JobState enumerates the values for job state. +type JobState string + +const ( + // JobStateCreated The job is currently in the Created state. + JobStateCreated JobState = "Created" + // JobStateDegraded The job is currently in the Degraded state. + JobStateDegraded JobState = "Degraded" + // JobStateDeleting The job is currently in the Deleting state. + JobStateDeleting JobState = "Deleting" + // JobStateFailed The job is currently in the Failed state. + JobStateFailed JobState = "Failed" + // JobStateRestarting The job is currently in the Restarting state.
+ JobStateRestarting JobState = "Restarting" + // JobStateRunning The job is currently in the Running state. + JobStateRunning JobState = "Running" + // JobStateScaling The job is currently in the Scaling state. + JobStateScaling JobState = "Scaling" + // JobStateStarting The job is currently in the Starting state. + JobStateStarting JobState = "Starting" + // JobStateStopped The job is currently in the Stopped state. + JobStateStopped JobState = "Stopped" + // JobStateStopping The job is currently in the Stopping state. + JobStateStopping JobState = "Stopping" +) + +// PossibleJobStateValues returns an array of possible values for the JobState const type. +func PossibleJobStateValues() []JobState { + return []JobState{JobStateCreated, JobStateDegraded, JobStateDeleting, JobStateFailed, JobStateRestarting, JobStateRunning, JobStateScaling, JobStateStarting, JobStateStopped, JobStateStopping} +} + +// JobType enumerates the values for job type. +type JobType string + +const ( + // Cloud wire value "Cloud". + Cloud JobType = "Cloud" + // Edge wire value "Edge". + Edge JobType = "Edge" +) + +// PossibleJobTypeValues returns an array of possible values for the JobType const type. +func PossibleJobTypeValues() []JobType { + return []JobType{Cloud, Edge} +} + +// JSONOutputSerializationFormat enumerates the values for json output serialization format. +type JSONOutputSerializationFormat string + +const ( + // Array wire value "Array". + Array JSONOutputSerializationFormat = "Array" + // LineSeparated wire value "LineSeparated". + LineSeparated JSONOutputSerializationFormat = "LineSeparated" +) + +// PossibleJSONOutputSerializationFormatValues returns an array of possible values for the JSONOutputSerializationFormat const type. +func PossibleJSONOutputSerializationFormatValues() []JSONOutputSerializationFormat { + return []JSONOutputSerializationFormat{Array, LineSeparated} +} + +// OutputErrorPolicy enumerates the values for output error policy. +type OutputErrorPolicy string + +const ( + // OutputErrorPolicyDrop wire value "Drop".
+ OutputErrorPolicyDrop OutputErrorPolicy = "Drop" + // OutputErrorPolicyStop wire value "Stop". + OutputErrorPolicyStop OutputErrorPolicy = "Stop" +) + +// PossibleOutputErrorPolicyValues returns an array of possible values for the OutputErrorPolicy const type. +func PossibleOutputErrorPolicyValues() []OutputErrorPolicy { + return []OutputErrorPolicy{OutputErrorPolicyDrop, OutputErrorPolicyStop} +} + +// OutputStartMode enumerates the values for output start mode. +type OutputStartMode string + +const ( + // CustomTime ... + CustomTime OutputStartMode = "CustomTime" + // JobStartTime ... + JobStartTime OutputStartMode = "JobStartTime" + // LastOutputEventTime ... + LastOutputEventTime OutputStartMode = "LastOutputEventTime" +) + +// PossibleOutputStartModeValues returns an array of possible values for the OutputStartMode const type. +func PossibleOutputStartModeValues() []OutputStartMode { + return []OutputStartMode{CustomTime, JobStartTime, LastOutputEventTime} +} + +// StreamingJobSkuName enumerates the values for streaming job sku name. +type StreamingJobSkuName string + +const ( + // Standard wire value "Standard". + Standard StreamingJobSkuName = "Standard" +) + +// PossibleStreamingJobSkuNameValues returns an array of possible values for the StreamingJobSkuName const type. +func PossibleStreamingJobSkuNameValues() []StreamingJobSkuName { + return []StreamingJobSkuName{Standard} +} + +// Type enumerates the values for type. +type Type string + +const ( + // TypeFunctionBinding ... + TypeFunctionBinding Type = "FunctionBinding" + // TypeMicrosoftMachineLearningServices ... + TypeMicrosoftMachineLearningServices Type = "Microsoft.MachineLearningServices" + // TypeMicrosoftMachineLearningWebService ... + TypeMicrosoftMachineLearningWebService Type = "Microsoft.MachineLearning/WebService" + // TypeMicrosoftStreamAnalyticsCLRUdf ... + TypeMicrosoftStreamAnalyticsCLRUdf Type = "Microsoft.StreamAnalytics/CLRUdf" + // TypeMicrosoftStreamAnalyticsJavascriptUdf ...
+ TypeMicrosoftStreamAnalyticsJavascriptUdf Type = "Microsoft.StreamAnalytics/JavascriptUdf" +) + +// PossibleTypeValues returns an array of possible values for the Type const type. +func PossibleTypeValues() []Type { + return []Type{TypeFunctionBinding, TypeMicrosoftMachineLearningServices, TypeMicrosoftMachineLearningWebService, TypeMicrosoftStreamAnalyticsCLRUdf, TypeMicrosoftStreamAnalyticsJavascriptUdf} +} + +// TypeBasicFunctionProperties enumerates the values for type basic function properties. +type TypeBasicFunctionProperties string + +const ( + // TypeAggregate wire value "Aggregate". + TypeAggregate TypeBasicFunctionProperties = "Aggregate" + // TypeFunctionProperties wire value "FunctionProperties". + TypeFunctionProperties TypeBasicFunctionProperties = "FunctionProperties" + // TypeScalar wire value "Scalar". + TypeScalar TypeBasicFunctionProperties = "Scalar" +) + +// PossibleTypeBasicFunctionPropertiesValues returns an array of possible values for the TypeBasicFunctionProperties const type. +func PossibleTypeBasicFunctionPropertiesValues() []TypeBasicFunctionProperties { + return []TypeBasicFunctionProperties{TypeAggregate, TypeFunctionProperties, TypeScalar} +} + +// TypeBasicInputProperties enumerates the values for type basic input properties. +type TypeBasicInputProperties string + +const ( + // TypeInputProperties wire value "InputProperties". + TypeInputProperties TypeBasicInputProperties = "InputProperties" + // TypeReference wire value "Reference". + TypeReference TypeBasicInputProperties = "Reference" + // TypeStream wire value "Stream". + TypeStream TypeBasicInputProperties = "Stream" +) + +// PossibleTypeBasicInputPropertiesValues returns an array of possible values for the TypeBasicInputProperties const type. +func PossibleTypeBasicInputPropertiesValues() []TypeBasicInputProperties { + return []TypeBasicInputProperties{TypeInputProperties, TypeReference, TypeStream} +} + +// TypeBasicOutputDataSource enumerates the values for type basic output data source. +type TypeBasicOutputDataSource string + +const ( + // TypeMicrosoftAzureFunction ...
+ TypeMicrosoftAzureFunction TypeBasicOutputDataSource = "Microsoft.AzureFunction" + // TypeMicrosoftDataLakeAccounts ... + TypeMicrosoftDataLakeAccounts TypeBasicOutputDataSource = "Microsoft.DataLake/Accounts" + // TypeMicrosoftEventHubEventHub ... + TypeMicrosoftEventHubEventHub TypeBasicOutputDataSource = "Microsoft.EventHub/EventHub" + // TypeMicrosoftServiceBusEventHub ... + TypeMicrosoftServiceBusEventHub TypeBasicOutputDataSource = "Microsoft.ServiceBus/EventHub" + // TypeMicrosoftServiceBusQueue ... + TypeMicrosoftServiceBusQueue TypeBasicOutputDataSource = "Microsoft.ServiceBus/Queue" + // TypeMicrosoftServiceBusTopic ... + TypeMicrosoftServiceBusTopic TypeBasicOutputDataSource = "Microsoft.ServiceBus/Topic" + // TypeMicrosoftSQLServerDatabase ... + TypeMicrosoftSQLServerDatabase TypeBasicOutputDataSource = "Microsoft.Sql/Server/Database" + // TypeMicrosoftSQLServerDataWarehouse ... + TypeMicrosoftSQLServerDataWarehouse TypeBasicOutputDataSource = "Microsoft.Sql/Server/DataWarehouse" + // TypeMicrosoftStorageBlob ... + TypeMicrosoftStorageBlob TypeBasicOutputDataSource = "Microsoft.Storage/Blob" + // TypeMicrosoftStorageDocumentDB ... + TypeMicrosoftStorageDocumentDB TypeBasicOutputDataSource = "Microsoft.Storage/DocumentDB" + // TypeMicrosoftStorageTable ... + TypeMicrosoftStorageTable TypeBasicOutputDataSource = "Microsoft.Storage/Table" + // TypeOutputDataSource wire value "OutputDataSource". + TypeOutputDataSource TypeBasicOutputDataSource = "OutputDataSource" + // TypePowerBI wire value "PowerBI". + TypePowerBI TypeBasicOutputDataSource = "PowerBI" +) + +// PossibleTypeBasicOutputDataSourceValues returns an array of possible values for the TypeBasicOutputDataSource const type.
+func PossibleTypeBasicOutputDataSourceValues() []TypeBasicOutputDataSource { + return []TypeBasicOutputDataSource{TypeMicrosoftAzureFunction, TypeMicrosoftDataLakeAccounts, TypeMicrosoftEventHubEventHub, TypeMicrosoftServiceBusEventHub, TypeMicrosoftServiceBusQueue, TypeMicrosoftServiceBusTopic, TypeMicrosoftSQLServerDatabase, TypeMicrosoftSQLServerDataWarehouse, TypeMicrosoftStorageBlob, TypeMicrosoftStorageDocumentDB, TypeMicrosoftStorageTable, TypeOutputDataSource, TypePowerBI} +} + +// TypeBasicReferenceInputDataSource enumerates the values for type basic reference input data source. +type TypeBasicReferenceInputDataSource string + +const ( + // TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase ... + TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase TypeBasicReferenceInputDataSource = "Microsoft.Sql/Server/Database" + // TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob ... + TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob TypeBasicReferenceInputDataSource = "Microsoft.Storage/Blob" + // TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource ... + TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource TypeBasicReferenceInputDataSource = "ReferenceInputDataSource" +) + +// PossibleTypeBasicReferenceInputDataSourceValues returns an array of possible values for the TypeBasicReferenceInputDataSource const type. +func PossibleTypeBasicReferenceInputDataSourceValues() []TypeBasicReferenceInputDataSource { + return []TypeBasicReferenceInputDataSource{TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase, TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob, TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource} +} + +// TypeBasicSerialization enumerates the values for type basic serialization. +type TypeBasicSerialization string + +const ( + // TypeAvro wire value "Avro". + TypeAvro TypeBasicSerialization = "Avro" + // TypeCsv wire value "Csv". + TypeCsv TypeBasicSerialization = "Csv" + // TypeCustomClr ...
+ TypeCustomClr TypeBasicSerialization = "CustomClr" + // TypeJSON wire value "Json" (note the mixed-case wire form). + TypeJSON TypeBasicSerialization = "Json" + // TypeParquet wire value "Parquet". + TypeParquet TypeBasicSerialization = "Parquet" + // TypeSerialization wire value "Serialization". + TypeSerialization TypeBasicSerialization = "Serialization" +) + +// PossibleTypeBasicSerializationValues returns an array of possible values for the TypeBasicSerialization const type. +func PossibleTypeBasicSerializationValues() []TypeBasicSerialization { + return []TypeBasicSerialization{TypeAvro, TypeCsv, TypeCustomClr, TypeJSON, TypeParquet, TypeSerialization} +} + +// TypeBasicStreamInputDataSource enumerates the values for type basic stream input data source. +type TypeBasicStreamInputDataSource string + +const ( + // TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs ... + TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs TypeBasicStreamInputDataSource = "Microsoft.Devices/IotHubs" + // TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub ... + TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub TypeBasicStreamInputDataSource = "Microsoft.EventHub/EventHub" + // TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub ... + TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub TypeBasicStreamInputDataSource = "Microsoft.ServiceBus/EventHub" + // TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob ... + TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob TypeBasicStreamInputDataSource = "Microsoft.Storage/Blob" + // TypeBasicStreamInputDataSourceTypeStreamInputDataSource ... + TypeBasicStreamInputDataSourceTypeStreamInputDataSource TypeBasicStreamInputDataSource = "StreamInputDataSource" +) + +// PossibleTypeBasicStreamInputDataSourceValues returns an array of possible values for the TypeBasicStreamInputDataSource const type.
+func PossibleTypeBasicStreamInputDataSourceValues() []TypeBasicStreamInputDataSource { + return []TypeBasicStreamInputDataSource{TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs, TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub, TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub, TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob, TypeBasicStreamInputDataSourceTypeStreamInputDataSource} +} + +// UdfType enumerates the values for udf type. +type UdfType string + +const ( + // Scalar wire value "Scalar". + Scalar UdfType = "Scalar" +) + +// PossibleUdfTypeValues returns an array of possible values for the UdfType const type. +func PossibleUdfTypeValues() []UdfType { + return []UdfType{Scalar} +} diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/functions.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/functions.go new file mode 100644 index 000000000000..8836b7faace0 --- /dev/null +++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/functions.go @@ -0,0 +1,743 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// FunctionsClient is the stream Analytics Client. It embeds BaseClient for the shared base URI and subscription ID. +type FunctionsClient struct { + BaseClient +} + +// NewFunctionsClient creates an instance of the FunctionsClient client. +func NewFunctionsClient(subscriptionID string) FunctionsClient { + return NewFunctionsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewFunctionsClientWithBaseURI creates an instance of the FunctionsClient client using a custom endpoint. Use this +// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewFunctionsClientWithBaseURI(baseURI string, subscriptionID string) FunctionsClient { + return FunctionsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrReplace creates a function or replaces an already existing function under an existing streaming job. +// Parameters: +// function - the definition of the function that will be used to create a new function or replace the existing +// one under the streaming job. +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// functionName - the name of the function. +// ifMatch - the ETag of the function. Omit this value to always overwrite the current function. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new function to be created, but to prevent updating an existing +// function. Other values will result in a 412 Pre-condition Failed response.
+func (client FunctionsClient) CreateOrReplace(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string, ifNoneMatch string) (result Function, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.CreateOrReplace") + defer func() { + sc := -1 // -1 until a response status is available + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.FunctionsClient", "CreateOrReplace", err.Error()) + } + + req, err := client.CreateOrReplacePreparer(ctx, function, resourceGroupName, jobName, functionName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + resp, err := client.CreateOrReplaceSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "CreateOrReplace", resp, "Failure sending request") + return + } + + result, err = client.CreateOrReplaceResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "CreateOrReplace", resp, "Failure responding to request") + } + + return +} + +// CreateOrReplacePreparer prepares the CreateOrReplace request.
+func (client FunctionsClient) CreateOrReplacePreparer(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" // NOTE(review): streamingjobs operations use 2017-04-01-preview while cluster operations use 2020-03-01-preview — presumably intentional in this preview package; confirm against the service spec + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), + autorest.WithJSON(function), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body.
+func (client FunctionsClient) CreateOrReplaceResponder(resp *http.Response) (result Function, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), // 200 or 201 are success for create/replace + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes a function from the streaming job. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// functionName - the name of the function. +func (client FunctionsClient) Delete(ctx context.Context, resourceGroupName string, jobName string, functionName string) (result autorest.Response, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Delete") + defer func() { + sc := -1 // -1 until a response status is available + if result.Response != nil { + sc = result.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.FunctionsClient", "Delete", err.Error()) + } + + req, err := client.DeletePreparer(ctx, resourceGroupName, jobName, functionName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = 
autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client FunctionsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) DeleteSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. 
+func (client FunctionsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets details about the specified function. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// functionName - the name of the function. +func (client FunctionsClient) Get(ctx context.Context, resourceGroupName string, jobName string, functionName string) (result Function, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.FunctionsClient", "Get", err.Error()) + } + + req, err := client.GetPreparer(ctx, resourceGroupName, jobName, functionName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", 
"Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client FunctionsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (client FunctionsClient) GetResponder(resp *http.Response) (result Function, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByStreamingJob lists all of the functions under the specified streaming job. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// selectParameter - the $select OData query parameter. This is a comma-separated list of structural properties +// to include in the response, or "*" to include all properties. By default, all properties are returned except +// diagnostics. Currently only accepts '*' as a valid value. +func (client FunctionsClient) ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result FunctionListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.flr.Response.Response != nil { + sc = result.flr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.FunctionsClient", "ListByStreamingJob", err.Error()) + } + + result.fn = 
client.listByStreamingJobNextResults + req, err := client.ListByStreamingJobPreparer(ctx, resourceGroupName, jobName, selectParameter) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.flr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "ListByStreamingJob", resp, "Failure sending request") + return + } + + result.flr, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "ListByStreamingJob", resp, "Failure responding to request") + } + if result.flr.hasNextLink() && result.flr.IsEmpty() { + err = result.NextWithContext(ctx) + } + + return +} + +// ListByStreamingJobPreparer prepares the ListByStreamingJob request. +func (client FunctionsClient) ListByStreamingJobPreparer(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(selectParameter) > 0 { + queryParameters["$select"] = autorest.Encode("query", selectParameter) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return 
preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByStreamingJobSender sends the ListByStreamingJob request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) ListByStreamingJobSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByStreamingJobResponder handles the response to the ListByStreamingJob request. The method always +// closes the http.Response Body. +func (client FunctionsClient) ListByStreamingJobResponder(resp *http.Response) (result FunctionListResult, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByStreamingJobNextResults retrieves the next set of results, if any. +func (client FunctionsClient) listByStreamingJobNextResults(ctx context.Context, lastResults FunctionListResult) (result FunctionListResult, err error) { + req, err := lastResults.functionListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "listByStreamingJobNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "listByStreamingJobNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "listByStreamingJobNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByStreamingJobComplete enumerates all values, automatically crossing page boundaries 
as required. +func (client FunctionsClient) ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result FunctionListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByStreamingJob(ctx, resourceGroupName, jobName, selectParameter) + return +} + +// RetrieveDefaultDefinition retrieves the default definition of a function based on the parameters specified. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// functionName - the name of the function. +// functionRetrieveDefaultDefinitionParameters - parameters used to specify the type of function to retrieve +// the default definition for. 
+func (client FunctionsClient) RetrieveDefaultDefinition(ctx context.Context, resourceGroupName string, jobName string, functionName string, functionRetrieveDefaultDefinitionParameters *BasicFunctionRetrieveDefaultDefinitionParameters) (result Function, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.RetrieveDefaultDefinition") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", err.Error()) + } + + req, err := client.RetrieveDefaultDefinitionPreparer(ctx, resourceGroupName, jobName, functionName, functionRetrieveDefaultDefinitionParameters) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", nil, "Failure preparing request") + return + } + + resp, err := client.RetrieveDefaultDefinitionSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", resp, "Failure sending request") + return + } + + result, err = client.RetrieveDefaultDefinitionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", 
"RetrieveDefaultDefinition", resp, "Failure responding to request") + } + + return +} + +// RetrieveDefaultDefinitionPreparer prepares the RetrieveDefaultDefinition request. +func (client FunctionsClient) RetrieveDefaultDefinitionPreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string, functionRetrieveDefaultDefinitionParameters *BasicFunctionRetrieveDefaultDefinitionParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/RetrieveDefaultDefinition", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if functionRetrieveDefaultDefinitionParameters != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(functionRetrieveDefaultDefinitionParameters)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// RetrieveDefaultDefinitionSender sends the RetrieveDefaultDefinition request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) RetrieveDefaultDefinitionSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// RetrieveDefaultDefinitionResponder handles the response to the RetrieveDefaultDefinition request. 
The method always +// closes the http.Response Body. +func (client FunctionsClient) RetrieveDefaultDefinitionResponder(resp *http.Response) (result Function, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Test tests if the information provided for a function is valid. This can range from testing the connection to the +// underlying web service behind the function or making sure the function code provided is syntactically correct. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// functionName - the name of the function. +// function - if the function specified does not already exist, this parameter must contain the full function +// definition intended to be tested. If the function specified already exists, this parameter can be left null +// to test the existing function as is or if specified, the properties specified will overwrite the +// corresponding properties in the existing function (exactly like a PATCH operation) and the resulting +// function will be tested. 
+func (client FunctionsClient) Test(ctx context.Context, resourceGroupName string, jobName string, functionName string, function *Function) (result FunctionsTestFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Test") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.FunctionsClient", "Test", err.Error()) + } + + req, err := client.TestPreparer(ctx, resourceGroupName, jobName, functionName, function) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Test", nil, "Failure preparing request") + return + } + + result, err = client.TestSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Test", result.Response(), "Failure sending request") + return + } + + return +} + +// TestPreparer prepares the Test request. 
+func (client FunctionsClient) TestPreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string, function *Function) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if function != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(function)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// TestSender sends the Test request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) TestSender(req *http.Request) (future FunctionsTestFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// TestResponder handles the response to the Test request. The method always +// closes the http.Response Body. 
+func (client FunctionsClient) TestResponder(resp *http.Response) (result ResourceTestStatus, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Update updates an existing function under an existing streaming job. This can be used to partially update (ie. +// update one or two properties) a function without affecting the rest the job or function definition. +// Parameters: +// function - a function object. The properties specified here will overwrite the corresponding properties in +// the existing function (ie. Those properties will be updated). Any properties that are set to null here will +// mean that the corresponding property in the existing function will remain the same and not change as a +// result of this PATCH operation. +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// functionName - the name of the function. +// ifMatch - the ETag of the function. Omit this value to always overwrite the current function. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. 
+func (client FunctionsClient) Update(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string) (result Function, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.FunctionsClient", "Update", err.Error()) + } + + req, err := client.UpdatePreparer(ctx, function, resourceGroupName, jobName, functionName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
+func (client FunctionsClient) UpdatePreparer(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "functionName": autorest.Encode("path", functionName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), + autorest.WithJSON(function), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client FunctionsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
func (client FunctionsClient) UpdateResponder(resp *http.Response) (result Function, err error) {
	// Only 200 OK is accepted; the updated function definition is unmarshalled from the body.
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/inputs.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/inputs.go
new file mode 100644
index 000000000000..5e215cda1d27
--- /dev/null
+++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/inputs.go
@@ -0,0 +1,659 @@
package streamanalytics

// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// NOTE(review): operations in this 2020-03-01-preview package send
// api-version=2017-04-01-preview; presumably intentional per the swagger — confirm on regeneration.

import (
	"context"
	"github.com/Azure/go-autorest/autorest"
	"github.com/Azure/go-autorest/autorest/azure"
	"github.com/Azure/go-autorest/autorest/validation"
	"github.com/Azure/go-autorest/tracing"
	"net/http"
)

// InputsClient is the stream Analytics Client
type InputsClient struct {
	// BaseClient supplies the shared autorest pipeline plus BaseURI and SubscriptionID.
	BaseClient
}

// NewInputsClient creates an instance of the InputsClient client.
+func NewInputsClient(subscriptionID string) InputsClient { + return NewInputsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewInputsClientWithBaseURI creates an instance of the InputsClient client using a custom endpoint. Use this when +// interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewInputsClientWithBaseURI(baseURI string, subscriptionID string) InputsClient { + return InputsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrReplace creates an input or replaces an already existing input under an existing streaming job. +// Parameters: +// input - the definition of the input that will be used to create a new input or replace the existing one +// under the streaming job. +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// inputName - the name of the input. +// ifMatch - the ETag of the input. Omit this value to always overwrite the current input. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new input to be created, but to prevent updating an existing input. +// Other values will result in a 412 Pre-condition Failed response. 
+func (client InputsClient) CreateOrReplace(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string, ifNoneMatch string) (result Input, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.CreateOrReplace") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: input, + Constraints: []validation.Constraint{{Target: "input.Properties", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "input.Properties.Compression", Name: validation.Null, Rule: false, + Chain: []validation.Constraint{{Target: "input.Properties.Compression.Type", Name: validation.Null, Rule: true, Chain: nil}}}, + }}}}, + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.InputsClient", "CreateOrReplace", err.Error()) + } + + req, err := client.CreateOrReplacePreparer(ctx, input, resourceGroupName, jobName, inputName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + resp, err := client.CreateOrReplaceSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "CreateOrReplace", resp, 
"Failure sending request") + return + } + + result, err = client.CreateOrReplaceResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "CreateOrReplace", resp, "Failure responding to request") + } + + return +} + +// CreateOrReplacePreparer prepares the CreateOrReplace request. +func (client InputsClient) CreateOrReplacePreparer(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "inputName": autorest.Encode("path", inputName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. 
func (client InputsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}

// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always
// closes the http.Response Body.
func (client InputsClient) CreateOrReplaceResponder(resp *http.Response) (result Input, err error) {
	// 200 (replaced) and 201 (created) are both success codes for this PUT.
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// Delete deletes an input from the streaming job.
// Parameters:
// resourceGroupName - the name of the resource group. The name is case insensitive.
// jobName - the name of the streaming job.
// inputName - the name of the input.
func (client InputsClient) Delete(ctx context.Context, resourceGroupName string, jobName string, inputName string) (result autorest.Response, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Delete")
		defer func() {
			// -1 signals "no HTTP response received" to the tracer.
			sc := -1
			if result.Response != nil {
				sc = result.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Validate ARM-level constraints locally before issuing the request.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: client.SubscriptionID,
			Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}},
		{TargetValue: resourceGroupName,
			Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
				{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
				{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil {
		return result, validation.NewError("streamanalytics.InputsClient", "Delete", err.Error())
	}

	req, err := client.DeletePreparer(ctx, resourceGroupName, jobName, inputName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Delete", nil, "Failure preparing request")
		return
	}

	resp, err := client.DeleteSender(req)
	if err != nil {
		result.Response = resp
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Delete", resp, "Failure sending request")
		return
	}

	result, err = client.DeleteResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Delete", resp, "Failure responding to request")
	}

	return
}

// DeletePreparer prepares the Delete request.
func (client InputsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string, inputName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"inputName":         autorest.Encode("path", inputName),
		"jobName":           autorest.Encode("path", jobName),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-04-01-preview"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsDelete(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client InputsClient) DeleteSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}

// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client InputsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
	// Both 200 (deleted) and 204 (no content) are treated as success for a DELETE.
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),
		autorest.ByClosing())
	result.Response = resp
	return
}

// Get gets details about the specified input.
// Parameters:
// resourceGroupName - the name of the resource group. The name is case insensitive.
// jobName - the name of the streaming job.
// inputName - the name of the input.
func (client InputsClient) Get(ctx context.Context, resourceGroupName string, jobName string, inputName string) (result Input, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Get")
		defer func() {
			// -1 signals "no HTTP response received" to the tracer.
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Validate ARM-level constraints locally before issuing the request.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: client.SubscriptionID,
			Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}},
		{TargetValue: resourceGroupName,
			Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
				{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
				{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil {
		return result, validation.NewError("streamanalytics.InputsClient", "Get", err.Error())
	}

	req, err := client.GetPreparer(ctx, resourceGroupName, jobName, inputName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Get", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Get", resp, "Failure sending request")
		return
	}

	result, err = client.GetResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Get", resp, "Failure responding to request")
	}

	return
}

// GetPreparer prepares the Get request.
func (client InputsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, inputName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"inputName":         autorest.Encode("path", inputName),
		"jobName":           autorest.Encode("path", jobName),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-04-01-preview"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client InputsClient) GetSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}

// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client InputsClient) GetResponder(resp *http.Response) (result Input, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// ListByStreamingJob lists all of the inputs under the specified streaming job.
// Parameters:
// resourceGroupName - the name of the resource group. The name is case insensitive.
// jobName - the name of the streaming job.
// selectParameter - the $select OData query parameter. This is a comma-separated list of structural properties
// to include in the response, or "*" to include all properties. By default, all properties are returned except
// diagnostics. Currently only accepts '*' as a valid value.
func (client InputsClient) ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result InputListResultPage, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.ListByStreamingJob")
		defer func() {
			// -1 signals "no HTTP response received" to the tracer.
			sc := -1
			if result.ilr.Response.Response != nil {
				sc = result.ilr.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Validate ARM-level constraints locally before issuing the request.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: client.SubscriptionID,
			Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}},
		{TargetValue: resourceGroupName,
			Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
				{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
				{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil {
		return result, validation.NewError("streamanalytics.InputsClient", "ListByStreamingJob", err.Error())
	}

	// Wire up the page-advance function before fetching the first page so the returned
	// page value can retrieve subsequent pages via NextWithContext.
	result.fn = client.listByStreamingJobNextResults
	req, err := client.ListByStreamingJobPreparer(ctx, resourceGroupName, jobName, selectParameter)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "ListByStreamingJob", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListByStreamingJobSender(req)
	if err != nil {
		result.ilr.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "ListByStreamingJob", resp, "Failure sending request")
		return
	}

	result.ilr, err = client.ListByStreamingJobResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "ListByStreamingJob", resp, "Failure responding to request")
	}
	// Advance past an empty first page (when a nextLink exists) so callers always start
	// on a page that has values.
	if result.ilr.hasNextLink() && result.ilr.IsEmpty() {
		err = result.NextWithContext(ctx)
	}

	return
}

// ListByStreamingJobPreparer prepares the ListByStreamingJob request.
func (client InputsClient) ListByStreamingJobPreparer(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"jobName":           autorest.Encode("path", jobName),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-04-01-preview"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	// $select is optional; omit the query parameter entirely when empty.
	if len(selectParameter) > 0 {
		queryParameters["$select"] = autorest.Encode("query", selectParameter)
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// ListByStreamingJobSender sends the ListByStreamingJob request. The method will close the
// http.Response Body if it receives an error.
func (client InputsClient) ListByStreamingJobSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}

// ListByStreamingJobResponder handles the response to the ListByStreamingJob request. The method always
// closes the http.Response Body.
func (client InputsClient) ListByStreamingJobResponder(resp *http.Response) (result InputListResult, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// listByStreamingJobNextResults retrieves the next set of results, if any.
func (client InputsClient) listByStreamingJobNextResults(ctx context.Context, lastResults InputListResult) (result InputListResult, err error) {
	req, err := lastResults.inputListResultPreparer(ctx)
	if err != nil {
		return result, autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "listByStreamingJobNextResults", nil, "Failure preparing next results request")
	}
	// A nil request (and nil error) means there is no nextLink, i.e. no further pages.
	if req == nil {
		return
	}
	resp, err := client.ListByStreamingJobSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "listByStreamingJobNextResults", resp, "Failure sending next results request")
	}
	result, err = client.ListByStreamingJobResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "listByStreamingJobNextResults", resp, "Failure responding to next results request")
	}
	return
}

// ListByStreamingJobComplete enumerates all values, automatically crossing page boundaries as required.
+func (client InputsClient) ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result InputListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByStreamingJob(ctx, resourceGroupName, jobName, selectParameter) + return +} + +// Test tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics service. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// inputName - the name of the input. +// input - if the input specified does not already exist, this parameter must contain the full input definition +// intended to be tested. If the input specified already exists, this parameter can be left null to test the +// existing input as is or if specified, the properties specified will overwrite the corresponding properties +// in the existing input (exactly like a PATCH operation) and the resulting input will be tested. 
func (client InputsClient) Test(ctx context.Context, resourceGroupName string, jobName string, inputName string, input *Input) (result InputsTestFuture, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Test")
		defer func() {
			// -1 signals "no HTTP response received" to the tracer.
			sc := -1
			if result.Response() != nil {
				sc = result.Response().StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// input is optional (a nil pointer skips the chained checks); when present,
	// Compression.Type is required whenever input.Properties.Compression is set.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: input,
			Constraints: []validation.Constraint{{Target: "input", Name: validation.Null, Rule: false,
				Chain: []validation.Constraint{{Target: "input.Properties", Name: validation.Null, Rule: false,
					Chain: []validation.Constraint{{Target: "input.Properties.Compression", Name: validation.Null, Rule: false,
						Chain: []validation.Constraint{{Target: "input.Properties.Compression.Type", Name: validation.Null, Rule: true, Chain: nil}}},
					}},
				}}}},
		{TargetValue: client.SubscriptionID,
			Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}},
		{TargetValue: resourceGroupName,
			Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
				{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
				{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil {
		return result, validation.NewError("streamanalytics.InputsClient", "Test", err.Error())
	}

	req, err := client.TestPreparer(ctx, resourceGroupName, jobName, inputName, input)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Test", nil, "Failure preparing request")
		return
	}

	// Test is a long-running operation: the sender returns a future that callers poll,
	// so there is no Responder call here.
	result, err = client.TestSender(req)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Test", result.Response(), "Failure sending request")
		return
	}

	return
}

// TestPreparer prepares the Test request.
func (client InputsClient) TestPreparer(ctx context.Context, resourceGroupName string, jobName string, inputName string, input *Input) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"inputName":         autorest.Encode("path", inputName),
		"jobName":           autorest.Encode("path", jobName),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-04-01-preview"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPost(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	// The request body is optional; only attach JSON when the caller supplied an input.
	if input != nil {
		preparer = autorest.DecoratePreparer(preparer,
			autorest.WithJSON(input))
	}
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// TestSender sends the Test request. The method will close the
// http.Response Body if it receives an error.
func (client InputsClient) TestSender(req *http.Request) (future InputsTestFuture, err error) {
	var resp *http.Response
	resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
	if err != nil {
		return
	}
	// Wrap the initial response in an azure.Future that tracks the LRO to completion.
	future.Future, err = azure.NewFutureFromResponse(resp)
	return
}

// TestResponder handles the response to the Test request. The method always
// closes the http.Response Body.
+func (client InputsClient) TestResponder(resp *http.Response) (result ResourceTestStatus, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Update updates an existing input under an existing streaming job. This can be used to partially update (ie. update +// one or two properties) an input without affecting the rest the job or input definition. +// Parameters: +// input - an Input object. The properties specified here will overwrite the corresponding properties in the +// existing input (ie. Those properties will be updated). Any properties that are set to null here will mean +// that the corresponding property in the existing input will remain the same and not change as a result of +// this PATCH operation. +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// inputName - the name of the input. +// ifMatch - the ETag of the input. Omit this value to always overwrite the current input. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. 
+func (client InputsClient) Update(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string) (result Input, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.InputsClient", "Update", err.Error()) + } + + req, err := client.UpdatePreparer(ctx, input, resourceGroupName, jobName, inputName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
+func (client InputsClient) UpdatePreparer(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "inputName": autorest.Encode("path", inputName), + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client InputsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
+func (client InputsClient) UpdateResponder(resp *http.Response) (result Input, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/models.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/models.go new file mode 100644 index 000000000000..3c6304e2e4b2 --- /dev/null +++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/models.go @@ -0,0 +1,7688 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "encoding/json" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/date" + "github.com/Azure/go-autorest/autorest/to" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// The package's fully qualified name. 
const fqdn = "github.com/Azure/azure-sdk-for-go/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics"

// AggregateFunctionProperties the properties that are associated with an aggregate function.
type AggregateFunctionProperties struct {
	// Etag - READ-ONLY; The current entity tag for the function. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency.
	Etag *string `json:"etag,omitempty"`
	// FunctionConfiguration is embedded; its fields round-trip through the "properties" JSON object.
	*FunctionConfiguration `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeFunctionProperties', 'TypeScalar', 'TypeAggregate'
	Type TypeBasicFunctionProperties `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for AggregateFunctionProperties.
// It pins the discriminator Type to TypeAggregate and omits the READ-ONLY Etag field.
func (afp AggregateFunctionProperties) MarshalJSON() ([]byte, error) {
	afp.Type = TypeAggregate
	objectMap := make(map[string]interface{})
	if afp.FunctionConfiguration != nil {
		objectMap["properties"] = afp.FunctionConfiguration
	}
	if afp.Type != "" {
		objectMap["type"] = afp.Type
	}
	return json.Marshal(objectMap)
}

// AsScalarFunctionProperties is the BasicFunctionProperties implementation for AggregateFunctionProperties.
func (afp AggregateFunctionProperties) AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) {
	return nil, false
}

// AsAggregateFunctionProperties is the BasicFunctionProperties implementation for AggregateFunctionProperties.
func (afp AggregateFunctionProperties) AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) {
	return &afp, true
}

// AsFunctionProperties is the BasicFunctionProperties implementation for AggregateFunctionProperties.
func (afp AggregateFunctionProperties) AsFunctionProperties() (*FunctionProperties, bool) {
	return nil, false
}

// AsBasicFunctionProperties is the BasicFunctionProperties implementation for AggregateFunctionProperties.
func (afp AggregateFunctionProperties) AsBasicFunctionProperties() (BasicFunctionProperties, bool) {
	return &afp, true
}

// AvroSerialization describes how data from an input is serialized or how data is serialized when written to
// an output in Avro format.
type AvroSerialization struct {
	// Properties - The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests.
	Properties interface{} `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeSerialization', 'TypeParquet', 'TypeCustomClr', 'TypeCsv', 'TypeJSON', 'TypeAvro'
	Type TypeBasicSerialization `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for AvroSerialization.
// It pins the discriminator Type to TypeAvro before marshaling.
func (as AvroSerialization) MarshalJSON() ([]byte, error) {
	as.Type = TypeAvro
	objectMap := make(map[string]interface{})
	if as.Properties != nil {
		objectMap["properties"] = as.Properties
	}
	if as.Type != "" {
		objectMap["type"] = as.Type
	}
	return json.Marshal(objectMap)
}

// AsParquetSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsParquetSerialization() (*ParquetSerialization, bool) {
	return nil, false
}

// AsCustomClrSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsCustomClrSerialization() (*CustomClrSerialization, bool) {
	return nil, false
}

// AsCsvSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsCsvSerialization() (*CsvSerialization, bool) {
	return nil, false
}

// AsJSONSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsJSONSerialization() (*JSONSerialization, bool) {
	return nil, false
}

// AsAvroSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsAvroSerialization() (*AvroSerialization, bool) {
	return &as, true
}

// AsSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsSerialization() (*Serialization, bool) {
	return nil, false
}

// AsBasicSerialization is the BasicSerialization implementation for AvroSerialization.
func (as AvroSerialization) AsBasicSerialization() (BasicSerialization, bool) {
	return &as, true
}

// AzureDataLakeStoreOutputDataSource describes an Azure Data Lake Store output data source.
type AzureDataLakeStoreOutputDataSource struct {
	// AzureDataLakeStoreOutputDataSourceProperties - The properties that are associated with an Azure Data Lake Store output. Required on PUT (CreateOrReplace) requests.
	*AzureDataLakeStoreOutputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) MarshalJSON() ([]byte, error) {
	// Pin the discriminator so serialized JSON is always tagged with the Data Lake type.
	adlsods.Type = TypeMicrosoftDataLakeAccounts
	objectMap := make(map[string]interface{})
	if adlsods.AzureDataLakeStoreOutputDataSourceProperties != nil {
		objectMap["properties"] = adlsods.AzureDataLakeStoreOutputDataSourceProperties
	}
	if adlsods.Type != "" {
		objectMap["type"] = adlsods.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
	return nil, false
}

// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
	return nil, false
}

// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
	return nil, false
}

// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
	return nil, false
}

// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
	return nil, false
}

// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
	return nil, false
}

// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
	return nil, false
}

// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
	return nil, false
}

// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
	return nil, false
}

// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
	return &adlsods, true
}

// AsOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
	return nil, false
}

// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource.
func (adlsods AzureDataLakeStoreOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
	return &adlsods, true
}

// UnmarshalJSON is the custom unmarshaler for AzureDataLakeStoreOutputDataSource struct.
func (adlsods *AzureDataLakeStoreOutputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var azureDataLakeStoreOutputDataSourceProperties AzureDataLakeStoreOutputDataSourceProperties
				err = json.Unmarshal(*v, &azureDataLakeStoreOutputDataSourceProperties)
				if err != nil {
					return err
				}
				adlsods.AzureDataLakeStoreOutputDataSourceProperties = &azureDataLakeStoreOutputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicOutputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				adlsods.Type = typeVar
			}
		}
	}

	return nil
}

// AzureDataLakeStoreOutputDataSourceProperties the properties that are associated with an Azure Data Lake
// Store.
type AzureDataLakeStoreOutputDataSourceProperties struct {
	// AccountName - The name of the Azure Data Lake Store account. Required on PUT (CreateOrReplace) requests.
	AccountName *string `json:"accountName,omitempty"`
	// TenantID - The tenant id of the user used to obtain the refresh token. Required on PUT (CreateOrReplace) requests.
	TenantID *string `json:"tenantId,omitempty"`
	// FilePathPrefix - The location of the file to which the output should be written to. Required on PUT (CreateOrReplace) requests.
	FilePathPrefix *string `json:"filePathPrefix,omitempty"`
	// DateFormat - The date format. Wherever {date} appears in filePathPrefix, the value of this property is used as the date format instead.
	DateFormat *string `json:"dateFormat,omitempty"`
	// TimeFormat - The time format. Wherever {time} appears in filePathPrefix, the value of this property is used as the time format instead.
	TimeFormat *string `json:"timeFormat,omitempty"`
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
	// RefreshToken - A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests.
	RefreshToken *string `json:"refreshToken,omitempty"`
	// TokenUserPrincipalName - The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token.
	TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"`
	// TokenUserDisplayName - The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token.
	TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"`
}

// AzureFunctionOutputDataSource defines the metadata of AzureFunctionOutputDataSource
type AzureFunctionOutputDataSource struct {
	// AzureFunctionOutputDataSourceProperties - The properties that are associated with a Azure Function output. Required on PUT (CreateOrReplace) requests.
	*AzureFunctionOutputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) MarshalJSON() ([]byte, error) {
	// Force the discriminator to this concrete type's constant before serializing.
	afods.Type = TypeMicrosoftAzureFunction
	objectMap := make(map[string]interface{})
	if afods.AzureFunctionOutputDataSourceProperties != nil {
		objectMap["properties"] = afods.AzureFunctionOutputDataSourceProperties
	}
	if afods.Type != "" {
		objectMap["type"] = afods.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
	return nil, false
}

// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
	return nil, false
}

// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
	return nil, false
}

// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
	return nil, false
}

// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
	return nil, false
}

// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
	return nil, false
}

// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
	return nil, false
}

// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
	return &afods, true
}

// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
	return nil, false
}

// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
	return nil, false
}

// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
	return nil, false
}

// AsOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
	return nil, false
}

// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource.
func (afods AzureFunctionOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
	return &afods, true
}

// UnmarshalJSON is the custom unmarshaler for AzureFunctionOutputDataSource struct.
func (afods *AzureFunctionOutputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var azureFunctionOutputDataSourceProperties AzureFunctionOutputDataSourceProperties
				err = json.Unmarshal(*v, &azureFunctionOutputDataSourceProperties)
				if err != nil {
					return err
				}
				afods.AzureFunctionOutputDataSourceProperties = &azureFunctionOutputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicOutputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				afods.Type = typeVar
			}
		}
	}

	return nil
}

// AzureFunctionOutputDataSourceProperties the properties that are associated with an Azure Function output.
type AzureFunctionOutputDataSourceProperties struct {
	// FunctionAppName - The name of your Azure Functions app.
	FunctionAppName *string `json:"functionAppName,omitempty"`
	// FunctionName - The name of the function in your Azure Functions app.
	FunctionName *string `json:"functionName,omitempty"`
	// APIKey - If you want to use an Azure Function from another subscription, you can do so by providing the key to access your function.
	APIKey *string `json:"apiKey,omitempty"`
	// MaxBatchSize - A property that lets you set the maximum size for each output batch that's sent to your Azure function. The input unit is in bytes. By default, this value is 262,144 bytes (256 KB).
	MaxBatchSize *float64 `json:"maxBatchSize,omitempty"`
	// MaxBatchCount - A property that lets you specify the maximum number of events in each batch that's sent to Azure Functions. The default value is 100.
	MaxBatchCount *float64 `json:"maxBatchCount,omitempty"`
}

// AzureMachineLearningServiceFunctionBinding the binding to an Azure Machine Learning web service.
type AzureMachineLearningServiceFunctionBinding struct {
	// AzureMachineLearningServiceFunctionBindingProperties - The binding properties associated with an Azure Machine learning web service.
	*AzureMachineLearningServiceFunctionBindingProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeFunctionBinding', 'TypeMicrosoftMachineLearningWebService', 'TypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeMicrosoftStreamAnalyticsCLRUdf', 'TypeMicrosoftMachineLearningServices'
	Type Type `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for AzureMachineLearningServiceFunctionBinding.
func (amlsfb AzureMachineLearningServiceFunctionBinding) MarshalJSON() ([]byte, error) {
	// Force the discriminator to this concrete type's constant before serializing.
	amlsfb.Type = TypeMicrosoftMachineLearningServices
	objectMap := make(map[string]interface{})
	if amlsfb.AzureMachineLearningServiceFunctionBindingProperties != nil {
		objectMap["properties"] = amlsfb.AzureMachineLearningServiceFunctionBindingProperties
	}
	if amlsfb.Type != "" {
		objectMap["type"] = amlsfb.Type
	}
	return json.Marshal(objectMap)
}

// AsAzureMachineLearningStudioFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningServiceFunctionBinding.
func (amlsfb AzureMachineLearningServiceFunctionBinding) AsAzureMachineLearningStudioFunctionBinding() (*AzureMachineLearningStudioFunctionBinding, bool) {
	return nil, false
}

// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningServiceFunctionBinding.
func (amlsfb AzureMachineLearningServiceFunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) {
	return nil, false
}

// AsCSharpFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningServiceFunctionBinding.
func (amlsfb AzureMachineLearningServiceFunctionBinding) AsCSharpFunctionBinding() (*CSharpFunctionBinding, bool) {
	return nil, false
}

// AsAzureMachineLearningServiceFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningServiceFunctionBinding.
func (amlsfb AzureMachineLearningServiceFunctionBinding) AsAzureMachineLearningServiceFunctionBinding() (*AzureMachineLearningServiceFunctionBinding, bool) {
	return &amlsfb, true
}

// AsFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningServiceFunctionBinding.
func (amlsfb AzureMachineLearningServiceFunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) {
	return nil, false
}

// AsBasicFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningServiceFunctionBinding.
func (amlsfb AzureMachineLearningServiceFunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) {
	return &amlsfb, true
}

// UnmarshalJSON is the custom unmarshaler for AzureMachineLearningServiceFunctionBinding struct.
func (amlsfb *AzureMachineLearningServiceFunctionBinding) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var azureMachineLearningServiceFunctionBindingProperties AzureMachineLearningServiceFunctionBindingProperties
				err = json.Unmarshal(*v, &azureMachineLearningServiceFunctionBindingProperties)
				if err != nil {
					return err
				}
				amlsfb.AzureMachineLearningServiceFunctionBindingProperties = &azureMachineLearningServiceFunctionBindingProperties
			}
		case "type":
			if v != nil {
				var typeVar Type
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				amlsfb.Type = typeVar
			}
		}
	}

	return nil
}

// AzureMachineLearningServiceFunctionBindingProperties the binding properties associated with an Azure Machine
// learning web service.
type AzureMachineLearningServiceFunctionBindingProperties struct {
	// Endpoint - The Request-Response execute endpoint of the Azure Machine Learning web service.
	Endpoint *string `json:"endpoint,omitempty"`
	// APIKey - The API key used to authenticate with Request-Response endpoint.
	APIKey *string `json:"apiKey,omitempty"`
	// Inputs - The inputs for the Azure Machine Learning web service endpoint.
	Inputs *[]AzureMachineLearningServiceInputColumn `json:"inputs,omitempty"`
	// Outputs - A list of outputs from the Azure Machine Learning web service endpoint execution.
	Outputs *[]AzureMachineLearningServiceOutputColumn `json:"outputs,omitempty"`
	// BatchSize - Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000.
	BatchSize *int32 `json:"batchSize,omitempty"`
	// NumberOfParallelRequests - The number of parallel requests that will be sent per partition of your job to the machine learning service. Default is 1.
	NumberOfParallelRequests *int32 `json:"numberOfParallelRequests,omitempty"`
}

// AzureMachineLearningServiceFunctionBindingRetrievalProperties the binding retrieval properties associated
// with an Azure Machine learning web service.
type AzureMachineLearningServiceFunctionBindingRetrievalProperties struct {
	// ExecuteEndpoint - The Request-Response execute endpoint of the Azure Machine Learning web service.
	ExecuteEndpoint *string `json:"executeEndpoint,omitempty"`
	// UdfType - The function type. Possible values include: 'Scalar'
	UdfType UdfType `json:"udfType,omitempty"`
}

// AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters the parameters needed to retrieve the
// default function definition for an Azure Machine Learning web service function.
type AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters struct {
	// AzureMachineLearningServiceFunctionBindingRetrievalProperties - The binding retrieval properties associated with an Azure Machine learning web service.
	*AzureMachineLearningServiceFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"`
	// BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftMachineLearningServices', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf', 'BindingTypeMicrosoftStreamAnalyticsCLRUdf'
	BindingType BindingType `json:"bindingType,omitempty"`
}

// MarshalJSON is the custom marshaler for AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) {
	// Force the discriminator to this concrete type's constant before serializing.
	amlsfrddp.BindingType = BindingTypeMicrosoftMachineLearningServices
	objectMap := make(map[string]interface{})
	if amlsfrddp.AzureMachineLearningServiceFunctionBindingRetrievalProperties != nil {
		objectMap["bindingRetrievalProperties"] = amlsfrddp.AzureMachineLearningServiceFunctionBindingRetrievalProperties
	}
	if amlsfrddp.BindingType != "" {
		objectMap["bindingType"] = amlsfrddp.BindingType
	}
	return json.Marshal(objectMap)
}

// AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, bool) {
	return &amlsfrddp, true
}

// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsCSharpFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters) AsCSharpFunctionRetrieveDefaultDefinitionParameters() (*CSharpFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) {
	return &amlsfrddp, true
}

// UnmarshalJSON is the custom unmarshaler for AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters struct.
func (amlsfrddp *AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "bindingRetrievalProperties":
			if v != nil {
				var azureMachineLearningServiceFunctionBindingRetrievalProperties AzureMachineLearningServiceFunctionBindingRetrievalProperties
				err = json.Unmarshal(*v, &azureMachineLearningServiceFunctionBindingRetrievalProperties)
				if err != nil {
					return err
				}
				amlsfrddp.AzureMachineLearningServiceFunctionBindingRetrievalProperties = &azureMachineLearningServiceFunctionBindingRetrievalProperties
			}
		case "bindingType":
			if v != nil {
				var bindingType BindingType
				err = json.Unmarshal(*v, &bindingType)
				if err != nil {
					return err
				}
				amlsfrddp.BindingType = bindingType
			}
		}
	}

	return nil
}

// AzureMachineLearningServiceInputColumn describes an input column for the Azure Machine Learning web service
// endpoint.
type AzureMachineLearningServiceInputColumn struct {
	// Name - The name of the input column.
	Name *string `json:"name,omitempty"`
	// DataType - The (Azure Machine Learning supported) data type of the input column.
	DataType *string `json:"dataType,omitempty"`
	// MapTo - The zero based index of the function parameter this input maps to.
	MapTo *int32 `json:"mapTo,omitempty"`
}

// AzureMachineLearningServiceInputs the inputs for the Azure Machine Learning web service endpoint.
type AzureMachineLearningServiceInputs struct {
	// Name - The name of the input. This is the name provided while authoring the endpoint.
	Name *string `json:"name,omitempty"`
	// ColumnNames - A list of input columns for the Azure Machine Learning web service endpoint.
	ColumnNames *[]AzureMachineLearningServiceInputColumn `json:"columnNames,omitempty"`
}

// AzureMachineLearningServiceOutputColumn describes an output column for the Azure Machine Learning web
// service endpoint.
type AzureMachineLearningServiceOutputColumn struct {
	// Name - The name of the output column.
	Name *string `json:"name,omitempty"`
	// DataType - The (Azure Machine Learning supported) data type of the output column.
	DataType *string `json:"dataType,omitempty"`
	// MapTo - The zero based index of the function parameter this input maps to.
	MapTo *int32 `json:"mapTo,omitempty"`
}

// AzureMachineLearningStudioFunctionBinding the binding to an Azure Machine Learning Studio.
type AzureMachineLearningStudioFunctionBinding struct {
	// AzureMachineLearningStudioFunctionBindingProperties - The binding properties associated with an Azure Machine learning Studio.
	*AzureMachineLearningStudioFunctionBindingProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeFunctionBinding', 'TypeMicrosoftMachineLearningWebService', 'TypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeMicrosoftStreamAnalyticsCLRUdf', 'TypeMicrosoftMachineLearningServices'
	Type Type `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for AzureMachineLearningStudioFunctionBinding.
func (amlsfb AzureMachineLearningStudioFunctionBinding) MarshalJSON() ([]byte, error) {
	// Force the discriminator to this concrete type's constant before serializing.
	amlsfb.Type = TypeMicrosoftMachineLearningWebService
	objectMap := make(map[string]interface{})
	if amlsfb.AzureMachineLearningStudioFunctionBindingProperties != nil {
		objectMap["properties"] = amlsfb.AzureMachineLearningStudioFunctionBindingProperties
	}
	if amlsfb.Type != "" {
		objectMap["type"] = amlsfb.Type
	}
	return json.Marshal(objectMap)
}

// AsAzureMachineLearningStudioFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningStudioFunctionBinding.
func (amlsfb AzureMachineLearningStudioFunctionBinding) AsAzureMachineLearningStudioFunctionBinding() (*AzureMachineLearningStudioFunctionBinding, bool) {
	return &amlsfb, true
}

// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningStudioFunctionBinding.
func (amlsfb AzureMachineLearningStudioFunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) {
	return nil, false
}

// AsCSharpFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningStudioFunctionBinding.
func (amlsfb AzureMachineLearningStudioFunctionBinding) AsCSharpFunctionBinding() (*CSharpFunctionBinding, bool) {
	return nil, false
}

// AsAzureMachineLearningServiceFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningStudioFunctionBinding.
func (amlsfb AzureMachineLearningStudioFunctionBinding) AsAzureMachineLearningServiceFunctionBinding() (*AzureMachineLearningServiceFunctionBinding, bool) {
	return nil, false
}

// AsFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningStudioFunctionBinding.
func (amlsfb AzureMachineLearningStudioFunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) {
	return nil, false
}

// AsBasicFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningStudioFunctionBinding.
func (amlsfb AzureMachineLearningStudioFunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) {
	return &amlsfb, true
}

// UnmarshalJSON is the custom unmarshaler for AzureMachineLearningStudioFunctionBinding struct.
func (amlsfb *AzureMachineLearningStudioFunctionBinding) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var azureMachineLearningStudioFunctionBindingProperties AzureMachineLearningStudioFunctionBindingProperties
				err = json.Unmarshal(*v, &azureMachineLearningStudioFunctionBindingProperties)
				if err != nil {
					return err
				}
				amlsfb.AzureMachineLearningStudioFunctionBindingProperties = &azureMachineLearningStudioFunctionBindingProperties
			}
		case "type":
			if v != nil {
				var typeVar Type
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				amlsfb.Type = typeVar
			}
		}
	}

	return nil
}

// AzureMachineLearningStudioFunctionBindingProperties the binding properties associated with an Azure Machine
// learning Studio.
type AzureMachineLearningStudioFunctionBindingProperties struct {
	// Endpoint - The Request-Response execute endpoint of the Azure Machine Learning Studio. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs
	Endpoint *string `json:"endpoint,omitempty"`
	// APIKey - The API key used to authenticate with Request-Response endpoint.
	APIKey *string `json:"apiKey,omitempty"`
	// Inputs - The inputs for the Azure Machine Learning Studio endpoint.
	Inputs *AzureMachineLearningStudioInputs `json:"inputs,omitempty"`
	// Outputs - A list of outputs from the Azure Machine Learning Studio endpoint execution.
	Outputs *[]AzureMachineLearningStudioOutputColumn `json:"outputs,omitempty"`
	// BatchSize - Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000.
	BatchSize *int32 `json:"batchSize,omitempty"`
}

// AzureMachineLearningStudioFunctionBindingRetrievalProperties the binding retrieval properties associated
// with an Azure Machine learning Studio.
type AzureMachineLearningStudioFunctionBindingRetrievalProperties struct {
	// ExecuteEndpoint - The Request-Response execute endpoint of the Azure Machine Learning Studio. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs
	ExecuteEndpoint *string `json:"executeEndpoint,omitempty"`
	// UdfType - The function type. Possible values include: 'Scalar'
	UdfType UdfType `json:"udfType,omitempty"`
}

// AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters the parameters needed to retrieve the
// default function definition for an Azure Machine Learning Studio function.
type AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters struct {
	// AzureMachineLearningStudioFunctionBindingRetrievalProperties - The binding retrieval properties associated with an Azure Machine learning Studio.
	*AzureMachineLearningStudioFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"`
	// BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftMachineLearningServices', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf', 'BindingTypeMicrosoftStreamAnalyticsCLRUdf'
	BindingType BindingType `json:"bindingType,omitempty"`
}

// MarshalJSON is the custom marshaler for AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) {
	// Force the discriminator to this concrete type's constant before serializing.
	amlsfrddp.BindingType = BindingTypeMicrosoftMachineLearningWebService
	objectMap := make(map[string]interface{})
	if amlsfrddp.AzureMachineLearningStudioFunctionBindingRetrievalProperties != nil {
		objectMap["bindingRetrievalProperties"] = amlsfrddp.AzureMachineLearningStudioFunctionBindingRetrievalProperties
	}
	if amlsfrddp.BindingType != "" {
		objectMap["bindingType"] = amlsfrddp.BindingType
	}
	return json.Marshal(objectMap)
}

// AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, bool) {
	return &amlsfrddp, true
}

// AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsCSharpFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters) AsCSharpFunctionRetrieveDefaultDefinitionParameters() (*CSharpFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters.
func (amlsfrddp AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) {
	return &amlsfrddp, true
}

// UnmarshalJSON is the custom unmarshaler for AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters struct.
+func (amlsfrddp *AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "bindingRetrievalProperties": + if v != nil { + var azureMachineLearningStudioFunctionBindingRetrievalProperties AzureMachineLearningStudioFunctionBindingRetrievalProperties + err = json.Unmarshal(*v, &azureMachineLearningStudioFunctionBindingRetrievalProperties) + if err != nil { + return err + } + amlsfrddp.AzureMachineLearningStudioFunctionBindingRetrievalProperties = &azureMachineLearningStudioFunctionBindingRetrievalProperties + } + case "bindingType": + if v != nil { + var bindingType BindingType + err = json.Unmarshal(*v, &bindingType) + if err != nil { + return err + } + amlsfrddp.BindingType = bindingType + } + } + } + + return nil +} + +// AzureMachineLearningStudioInputColumn describes an input column for the Azure Machine Learning Studio +// endpoint. +type AzureMachineLearningStudioInputColumn struct { + // Name - The name of the input column. + Name *string `json:"name,omitempty"` + // DataType - The (Azure Machine Learning supported) data type of the input column. A list of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . + DataType *string `json:"dataType,omitempty"` + // MapTo - The zero based index of the function parameter this input maps to. + MapTo *int32 `json:"mapTo,omitempty"` +} + +// AzureMachineLearningStudioInputs the inputs for the Azure Machine Learning Studio endpoint. +type AzureMachineLearningStudioInputs struct { + // Name - The name of the input. This is the name provided while authoring the endpoint. + Name *string `json:"name,omitempty"` + // ColumnNames - A list of input columns for the Azure Machine Learning Studio endpoint. 
+ ColumnNames *[]AzureMachineLearningStudioInputColumn `json:"columnNames,omitempty"` +} + +// AzureMachineLearningStudioOutputColumn describes an output column for the Azure Machine Learning Studio +// endpoint. +type AzureMachineLearningStudioOutputColumn struct { + // Name - The name of the output column. + Name *string `json:"name,omitempty"` + // DataType - The (Azure Machine Learning supported) data type of the output column. A list of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . + DataType *string `json:"dataType,omitempty"` +} + +// AzureSQLDatabaseDataSourceProperties the properties that are associated with an Azure SQL database data +// source. +type AzureSQLDatabaseDataSourceProperties struct { + // Server - The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Server *string `json:"server,omitempty"` + // Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Database *string `json:"database,omitempty"` + // User - The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + User *string `json:"user,omitempty"` + // Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Password *string `json:"password,omitempty"` + // Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Table *string `json:"table,omitempty"` + // MaxBatchCount - Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. + MaxBatchCount *float64 `json:"maxBatchCount,omitempty"` + // MaxWriterCount - Max Writer count, currently only 1(single writer) and 0(based on query partition) are available. Optional on PUT requests. 
+ MaxWriterCount *float64 `json:"maxWriterCount,omitempty"` + // AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString' + AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` +} + +// AzureSQLDatabaseOutputDataSource describes an Azure SQL database output data source. +type AzureSQLDatabaseOutputDataSource struct { + // AzureSQLDatabaseOutputDataSourceProperties - The properties that are associated with an Azure SQL database output. Required on PUT (CreateOrReplace) requests. + *AzureSQLDatabaseOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) MarshalJSON() ([]byte, error) { + asdods.Type = TypeMicrosoftSQLServerDatabase + objectMap := make(map[string]interface{}) + if asdods.AzureSQLDatabaseOutputDataSourceProperties != nil { + objectMap["properties"] = asdods.AzureSQLDatabaseOutputDataSourceProperties + } + if asdods.Type != "" { + objectMap["type"] = asdods.Type + } + return json.Marshal(objectMap) +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. 
+func (asdods AzureSQLDatabaseOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return &asdods, true +} + +// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. 
+func (asdods AzureSQLDatabaseOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. +func (asdods AzureSQLDatabaseOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &asdods, true +} + +// UnmarshalJSON is the custom unmarshaler for AzureSQLDatabaseOutputDataSource struct. 
+func (asdods *AzureSQLDatabaseOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var azureSQLDatabaseOutputDataSourceProperties AzureSQLDatabaseOutputDataSourceProperties + err = json.Unmarshal(*v, &azureSQLDatabaseOutputDataSourceProperties) + if err != nil { + return err + } + asdods.AzureSQLDatabaseOutputDataSourceProperties = &azureSQLDatabaseOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + asdods.Type = typeVar + } + } + } + + return nil +} + +// AzureSQLDatabaseOutputDataSourceProperties the properties that are associated with an Azure SQL database +// output. +type AzureSQLDatabaseOutputDataSourceProperties struct { + // Server - The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Server *string `json:"server,omitempty"` + // Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Database *string `json:"database,omitempty"` + // User - The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + User *string `json:"user,omitempty"` + // Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Password *string `json:"password,omitempty"` + // Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Table *string `json:"table,omitempty"` + // MaxBatchCount - Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. 
+ MaxBatchCount *float64 `json:"maxBatchCount,omitempty"` + // MaxWriterCount - Max Writer count, currently only 1(single writer) and 0(based on query partition) are available. Optional on PUT requests. + MaxWriterCount *float64 `json:"maxWriterCount,omitempty"` + // AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString' + AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` +} + +// AzureSQLReferenceInputDataSource describes an Azure SQL database reference input data source. +type AzureSQLReferenceInputDataSource struct { + Properties *AzureSQLReferenceInputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource', 'TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob', 'TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase' + Type TypeBasicReferenceInputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for AzureSQLReferenceInputDataSource. +func (asrids AzureSQLReferenceInputDataSource) MarshalJSON() ([]byte, error) { + asrids.Type = TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase + objectMap := make(map[string]interface{}) + if asrids.Properties != nil { + objectMap["properties"] = asrids.Properties + } + if asrids.Type != "" { + objectMap["type"] = asrids.Type + } + return json.Marshal(objectMap) +} + +// AsBlobReferenceInputDataSource is the BasicReferenceInputDataSource implementation for AzureSQLReferenceInputDataSource. +func (asrids AzureSQLReferenceInputDataSource) AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) { + return nil, false +} + +// AsAzureSQLReferenceInputDataSource is the BasicReferenceInputDataSource implementation for AzureSQLReferenceInputDataSource. 
+func (asrids AzureSQLReferenceInputDataSource) AsAzureSQLReferenceInputDataSource() (*AzureSQLReferenceInputDataSource, bool) { + return &asrids, true +} + +// AsReferenceInputDataSource is the BasicReferenceInputDataSource implementation for AzureSQLReferenceInputDataSource. +func (asrids AzureSQLReferenceInputDataSource) AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) { + return nil, false +} + +// AsBasicReferenceInputDataSource is the BasicReferenceInputDataSource implementation for AzureSQLReferenceInputDataSource. +func (asrids AzureSQLReferenceInputDataSource) AsBasicReferenceInputDataSource() (BasicReferenceInputDataSource, bool) { + return &asrids, true +} + +// AzureSQLReferenceInputDataSourceProperties ... +type AzureSQLReferenceInputDataSourceProperties struct { + // Server - This element is associated with the datasource element. This is the name of the server that contains the database that will be written to. + Server *string `json:"server,omitempty"` + // Database - This element is associated with the datasource element. This is the name of the database that output will be written to. + Database *string `json:"database,omitempty"` + // User - This element is associated with the datasource element. This is the user name that will be used to connect to the SQL Database instance. + User *string `json:"user,omitempty"` + // Password - This element is associated with the datasource element. This is the password that will be used to connect to the SQL Database instance. + Password *string `json:"password,omitempty"` + // Table - This element is associated with the datasource element. The name of the table in the Azure SQL database. + Table *string `json:"table,omitempty"` + // RefreshType - This element is associated with the datasource element. This element is of enum type. 
It indicates what kind of data refresh option do we want to use:Static/RefreshPeriodicallyWithFull/RefreshPeriodicallyWithDelta + RefreshType *string `json:"refreshType,omitempty"` + // RefreshRate - This element is associated with the datasource element. This indicates how frequently the data will be fetched from the database. It is of DateTime format. + RefreshRate *string `json:"refreshRate,omitempty"` + // FullSnapshotQuery - This element is associated with the datasource element. This query is used to fetch data from the sql database. + FullSnapshotQuery *string `json:"fullSnapshotQuery,omitempty"` + // DeltaSnapshotQuery - This element is associated with the datasource element. This query is used to fetch incremental changes from the SQL database. To use this option, we recommend using temporal tables in Azure SQL Database. + DeltaSnapshotQuery *string `json:"deltaSnapshotQuery,omitempty"` +} + +// AzureSynapseDataSourceProperties the properties that are associated with an Azure SQL database data source. +type AzureSynapseDataSourceProperties struct { + // Server - The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Server *string `json:"server,omitempty"` + // Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Database *string `json:"database,omitempty"` + // Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Table *string `json:"table,omitempty"` + // User - The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + User *string `json:"user,omitempty"` + // Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Password *string `json:"password,omitempty"` +} + +// AzureSynapseOutputDataSource describes an Azure Synapse output data source. 
+type AzureSynapseOutputDataSource struct { + // AzureSynapseOutputDataSourceProperties - The properties that are associated with an Azure Synapse output. Required on PUT (CreateOrReplace) requests. + *AzureSynapseOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) MarshalJSON() ([]byte, error) { + asods.Type = TypeMicrosoftSQLServerDataWarehouse + objectMap := make(map[string]interface{}) + if asods.AzureSynapseOutputDataSourceProperties != nil { + objectMap["properties"] = asods.AzureSynapseOutputDataSourceProperties + } + if asods.Type != "" { + objectMap["type"] = asods.Type + } + return json.Marshal(objectMap) +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. 
+func (asods AzureSynapseOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { + return &asods, true +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. 
+func (asods AzureSynapseOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource. +func (asods AzureSynapseOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &asods, true +} + +// UnmarshalJSON is the custom unmarshaler for AzureSynapseOutputDataSource struct. 
+func (asods *AzureSynapseOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var azureSynapseOutputDataSourceProperties AzureSynapseOutputDataSourceProperties + err = json.Unmarshal(*v, &azureSynapseOutputDataSourceProperties) + if err != nil { + return err + } + asods.AzureSynapseOutputDataSourceProperties = &azureSynapseOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + asods.Type = typeVar + } + } + } + + return nil +} + +// AzureSynapseOutputDataSourceProperties the properties that are associated with an Azure Synapse output. +type AzureSynapseOutputDataSourceProperties struct { + // Server - The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Server *string `json:"server,omitempty"` + // Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Database *string `json:"database,omitempty"` + // Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Table *string `json:"table,omitempty"` + // User - The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + User *string `json:"user,omitempty"` + // Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. + Password *string `json:"password,omitempty"` +} + +// AzureTableOutputDataSource describes an Azure Table output data source. +type AzureTableOutputDataSource struct { + // AzureTableOutputDataSourceProperties - The properties that are associated with an Azure Table output. Required on PUT (CreateOrReplace) requests. 
+ *AzureTableOutputDataSourceProperties `json:"properties,omitempty"` + // Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts' + Type TypeBasicOutputDataSource `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) MarshalJSON() ([]byte, error) { + atods.Type = TypeMicrosoftStorageTable + objectMap := make(map[string]interface{}) + if atods.AzureTableOutputDataSourceProperties != nil { + objectMap["properties"] = atods.AzureTableOutputDataSourceProperties + } + if atods.Type != "" { + objectMap["type"] = atods.Type + } + return json.Marshal(objectMap) +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return &atods, true +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. 
+func (atods AzureTableOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. 
+func (atods AzureTableOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource. +func (atods AzureTableOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &atods, true +} + +// UnmarshalJSON is the custom unmarshaler for AzureTableOutputDataSource struct. +func (atods *AzureTableOutputDataSource) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var azureTableOutputDataSourceProperties AzureTableOutputDataSourceProperties + err = json.Unmarshal(*v, &azureTableOutputDataSourceProperties) + if err != nil { + return err + } + atods.AzureTableOutputDataSourceProperties = &azureTableOutputDataSourceProperties + } + case "type": + if v != nil { + var typeVar TypeBasicOutputDataSource + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + atods.Type = typeVar + } + } + } + + return nil +} + +// AzureTableOutputDataSourceProperties the properties that are associated with an Azure Table output. +type AzureTableOutputDataSourceProperties struct { + // AccountName - The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. 
	AccountName *string `json:"accountName,omitempty"`
	// AccountKey - The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests.
	AccountKey *string `json:"accountKey,omitempty"`
	// Table - The name of the Azure Table. Required on PUT (CreateOrReplace) requests.
	Table *string `json:"table,omitempty"`
	// PartitionKey - This element indicates the name of a column from the SELECT statement in the query that will be used as the partition key for the Azure Table. Required on PUT (CreateOrReplace) requests.
	PartitionKey *string `json:"partitionKey,omitempty"`
	// RowKey - This element indicates the name of a column from the SELECT statement in the query that will be used as the row key for the Azure Table. Required on PUT (CreateOrReplace) requests.
	RowKey *string `json:"rowKey,omitempty"`
	// ColumnsToRemove - If specified, each item in the array is the name of a column to remove (if present) from output event entities.
	ColumnsToRemove *[]string `json:"columnsToRemove,omitempty"`
	// BatchSize - The number of rows to write to the Azure Table at a time.
	BatchSize *int32 `json:"batchSize,omitempty"`
}

// BlobDataSourceProperties the properties that are associated with a blob data source.
type BlobDataSourceProperties struct {
	// StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests.
	StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"`
	// Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests.
	Container *string `json:"container,omitempty"`
	// PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example.
	PathPattern *string `json:"pathPattern,omitempty"`
	// DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
	DateFormat *string `json:"dateFormat,omitempty"`
	// TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
	TimeFormat *string `json:"timeFormat,omitempty"`
}

// BlobOutputDataSource describes a blob output data source.
type BlobOutputDataSource struct {
	// BlobOutputDataSourceProperties - The properties that are associated with a blob output. Required on PUT (CreateOrReplace) requests.
	*BlobOutputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for BlobOutputDataSource.
// The Type discriminator is forced to TypeMicrosoftStorageBlob so the wire
// payload always carries the correct polymorphic kind for this concrete type.
func (bods BlobOutputDataSource) MarshalJSON() ([]byte, error) {
	bods.Type = TypeMicrosoftStorageBlob
	objectMap := make(map[string]interface{})
	if bods.BlobOutputDataSourceProperties != nil {
		objectMap["properties"] = bods.BlobOutputDataSourceProperties
	}
	if bods.Type != "" {
		objectMap["type"] = bods.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
// The As* methods below implement the BasicOutputDataSource polymorphic
// type-assertion pattern: only the method matching the concrete type (and the
// AsBasicOutputDataSource catch-all) returns a non-nil value with true.
func (bods BlobOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
	return &bods, true
}

// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
	return nil, false
}

// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
	return nil, false
}

// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
	return nil, false
}

// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
	return nil, false
}

// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
	return nil, false
}

// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
	return nil, false
}

// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
	return nil, false
}

// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
	return nil, false
}

// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
	return nil, false
}

// AsOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
	return nil, false
}

// AsBasicOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
func (bods BlobOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
	return &bods, true
}

// UnmarshalJSON is the custom unmarshaler for BlobOutputDataSource struct.
// It decodes the "properties" payload and the polymorphic "type" discriminator
// independently from the raw JSON map.
func (bods *BlobOutputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var blobOutputDataSourceProperties BlobOutputDataSourceProperties
				err = json.Unmarshal(*v, &blobOutputDataSourceProperties)
				if err != nil {
					return err
				}
				bods.BlobOutputDataSourceProperties = &blobOutputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicOutputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				bods.Type = typeVar
			}
		}
	}

	return nil
}

// BlobOutputDataSourceProperties the properties that are associated with a blob output.
type BlobOutputDataSourceProperties struct {
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
	// StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests.
	StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"`
	// Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests.
	Container *string `json:"container,omitempty"`
	// PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example.
	PathPattern *string `json:"pathPattern,omitempty"`
	// DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
	DateFormat *string `json:"dateFormat,omitempty"`
	// TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
	TimeFormat *string `json:"timeFormat,omitempty"`
}

// BlobReferenceInputDataSource describes a blob input data source that contains reference data.
type BlobReferenceInputDataSource struct {
	// BlobReferenceInputDataSourceProperties - The properties that are associated with a blob input containing reference data. Required on PUT (CreateOrReplace) requests.
	*BlobReferenceInputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource', 'TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob', 'TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase'
	Type TypeBasicReferenceInputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for BlobReferenceInputDataSource.
// The Type discriminator is forced to the MicrosoftStorageBlob value before
// encoding so the payload always identifies this concrete type.
func (brids BlobReferenceInputDataSource) MarshalJSON() ([]byte, error) {
	brids.Type = TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob
	objectMap := make(map[string]interface{})
	if brids.BlobReferenceInputDataSourceProperties != nil {
		objectMap["properties"] = brids.BlobReferenceInputDataSourceProperties
	}
	if brids.Type != "" {
		objectMap["type"] = brids.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource.
func (brids BlobReferenceInputDataSource) AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) {
	return &brids, true
}

// AsAzureSQLReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource.
func (brids BlobReferenceInputDataSource) AsAzureSQLReferenceInputDataSource() (*AzureSQLReferenceInputDataSource, bool) {
	return nil, false
}

// AsReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource.
func (brids BlobReferenceInputDataSource) AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) {
	return nil, false
}

// AsBasicReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource.
func (brids BlobReferenceInputDataSource) AsBasicReferenceInputDataSource() (BasicReferenceInputDataSource, bool) {
	return &brids, true
}

// UnmarshalJSON is the custom unmarshaler for BlobReferenceInputDataSource struct.
// It decodes the "properties" payload and the polymorphic "type" discriminator
// independently from the raw JSON map.
func (brids *BlobReferenceInputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var blobReferenceInputDataSourceProperties BlobReferenceInputDataSourceProperties
				err = json.Unmarshal(*v, &blobReferenceInputDataSourceProperties)
				if err != nil {
					return err
				}
				brids.BlobReferenceInputDataSourceProperties = &blobReferenceInputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicReferenceInputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				brids.Type = typeVar
			}
		}
	}

	return nil
}

// BlobReferenceInputDataSourceProperties the properties that are associated with a blob input containing
// reference data.
type BlobReferenceInputDataSourceProperties struct {
	// StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests.
	StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"`
	// Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests.
	Container *string `json:"container,omitempty"`
	// PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example.
	PathPattern *string `json:"pathPattern,omitempty"`
	// DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
	DateFormat *string `json:"dateFormat,omitempty"`
	// TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
	TimeFormat *string `json:"timeFormat,omitempty"`
}

// BlobStreamInputDataSource describes a blob input data source that contains stream data.
type BlobStreamInputDataSource struct {
	// BlobStreamInputDataSourceProperties - The properties that are associated with a blob input containing stream data. Required on PUT (CreateOrReplace) requests.
	*BlobStreamInputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs'
	Type TypeBasicStreamInputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for BlobStreamInputDataSource.
// The Type discriminator is forced to the MicrosoftStorageBlob value before
// encoding so the payload always identifies this concrete type.
func (bsids BlobStreamInputDataSource) MarshalJSON() ([]byte, error) {
	bsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob
	objectMap := make(map[string]interface{})
	if bsids.BlobStreamInputDataSourceProperties != nil {
		objectMap["properties"] = bsids.BlobStreamInputDataSourceProperties
	}
	if bsids.Type != "" {
		objectMap["type"] = bsids.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
func (bsids BlobStreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) {
	return &bsids, true
}

// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
func (bsids BlobStreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) {
	return nil, false
}

// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
func (bsids BlobStreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) {
	return nil, false
}

// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
func (bsids BlobStreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) {
	return nil, false
}

// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
func (bsids BlobStreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) {
	return nil, false
}

// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
func (bsids BlobStreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) {
	return &bsids, true
}

// UnmarshalJSON is the custom unmarshaler for BlobStreamInputDataSource struct.
// It decodes the "properties" payload and the polymorphic "type" discriminator
// independently from the raw JSON map.
func (bsids *BlobStreamInputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var blobStreamInputDataSourceProperties BlobStreamInputDataSourceProperties
				err = json.Unmarshal(*v, &blobStreamInputDataSourceProperties)
				if err != nil {
					return err
				}
				bsids.BlobStreamInputDataSourceProperties = &blobStreamInputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicStreamInputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				bsids.Type = typeVar
			}
		}
	}

	return nil
}

// BlobStreamInputDataSourceProperties the properties that are associated with a blob input containing stream
// data.
type BlobStreamInputDataSourceProperties struct {
	// SourcePartitionCount - The partition count of the blob input data source. Range 1 - 256.
	SourcePartitionCount *int32 `json:"sourcePartitionCount,omitempty"`
	// StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests.
	StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"`
	// Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests.
	Container *string `json:"container,omitempty"`
	// PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example.
	PathPattern *string `json:"pathPattern,omitempty"`
	// DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
	DateFormat *string `json:"dateFormat,omitempty"`
	// TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
	TimeFormat *string `json:"timeFormat,omitempty"`
}

// Cluster a Stream Analytics Cluster object
type Cluster struct {
	autorest.Response `json:"-"`
	Sku               *ClusterSku `json:"sku,omitempty"`
	// Etag - READ-ONLY; The current entity tag for the cluster. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency.
	Etag *string `json:"etag,omitempty"`
	// Properties - The properties associated with a Stream Analytics cluster.
	Properties *ClusterProperties `json:"properties,omitempty"`
	// Tags - Resource tags.
	Tags map[string]*string `json:"tags"`
	// Location - The geo-location where the resource lives
	Location *string `json:"location,omitempty"`
	// ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
	ID *string `json:"id,omitempty"`
	// Name - READ-ONLY; The name of the resource
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
	Type *string `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for Cluster.
// READ-ONLY fields (Etag, ID, Name, Type) are deliberately omitted from the
// request body, as the service rejects or ignores them on write.
func (c Cluster) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if c.Sku != nil {
		objectMap["sku"] = c.Sku
	}
	if c.Properties != nil {
		objectMap["properties"] = c.Properties
	}
	if c.Tags != nil {
		objectMap["tags"] = c.Tags
	}
	if c.Location != nil {
		objectMap["location"] = c.Location
	}
	return json.Marshal(objectMap)
}

// ClusterInfo the properties associated with a Stream Analytics cluster.
type ClusterInfo struct {
	// ID - The resource id of cluster.
	ID *string `json:"id,omitempty"`
}

// ClusterJob a streaming job.
type ClusterJob struct {
	// ID - READ-ONLY; Resource ID of the streaming job.
	ID *string `json:"id,omitempty"`
	// StreamingUnits - READ-ONLY; The number of streaming units that are used by the streaming job.
	StreamingUnits *int32 `json:"streamingUnits,omitempty"`
	// JobState - Possible values include: 'JobStateCreated', 'JobStateStarting', 'JobStateRunning', 'JobStateStopping', 'JobStateStopped', 'JobStateDeleting', 'JobStateFailed', 'JobStateDegraded', 'JobStateRestarting', 'JobStateScaling'
	JobState JobState `json:"jobState,omitempty"`
}

// MarshalJSON is the custom marshaler for ClusterJob.
// Only JobState is written; ID and StreamingUnits are READ-ONLY and omitted.
func (cj ClusterJob) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if cj.JobState != "" {
		objectMap["jobState"] = cj.JobState
	}
	return json.Marshal(objectMap)
}

// ClusterJobListResult a list of streaming jobs. Populated by a List operation.
type ClusterJobListResult struct {
	autorest.Response `json:"-"`
	// Value - READ-ONLY; A list of streaming jobs.
	Value *[]ClusterJob `json:"value,omitempty"`
	// NextLink - READ-ONLY; The URL to fetch the next set of streaming jobs.
	NextLink *string `json:"nextLink,omitempty"`
}

// ClusterJobListResultIterator provides access to a complete listing of ClusterJob values.
type ClusterJobListResultIterator struct {
	i    int
	page ClusterJobListResultPage
}

// NextWithContext advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
func (iter *ClusterJobListResultIterator) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/ClusterJobListResultIterator.NextWithContext")
		defer func() {
			sc := -1
			if iter.Response().Response.Response != nil {
				sc = iter.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	iter.i++
	if iter.i < len(iter.page.Values()) {
		return nil
	}
	err = iter.page.NextWithContext(ctx)
	if err != nil {
		// Roll back the index so a failed page fetch leaves the iterator where it was.
		iter.i--
		return err
	}
	iter.i = 0
	return nil
}

// Next advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (iter *ClusterJobListResultIterator) Next() error {
	return iter.NextWithContext(context.Background())
}

// NotDone returns true if the enumeration should be started or is not yet complete.
func (iter ClusterJobListResultIterator) NotDone() bool {
	return iter.page.NotDone() && iter.i < len(iter.page.Values())
}

// Response returns the raw server response from the last page request.
func (iter ClusterJobListResultIterator) Response() ClusterJobListResult {
	return iter.page.Response()
}

// Value returns the current value or a zero-initialized value if the
// iterator has advanced beyond the end of the collection.
func (iter ClusterJobListResultIterator) Value() ClusterJob {
	if !iter.page.NotDone() {
		return ClusterJob{}
	}
	return iter.page.Values()[iter.i]
}

// Creates a new instance of the ClusterJobListResultIterator type.
func NewClusterJobListResultIterator(page ClusterJobListResultPage) ClusterJobListResultIterator {
	return ClusterJobListResultIterator{page: page}
}

// IsEmpty returns true if the ListResult contains no values.
func (cjlr ClusterJobListResult) IsEmpty() bool {
	return cjlr.Value == nil || len(*cjlr.Value) == 0
}

// hasNextLink returns true if the NextLink is not empty.
func (cjlr ClusterJobListResult) hasNextLink() bool {
	return cjlr.NextLink != nil && len(*cjlr.NextLink) != 0
}

// clusterJobListResultPreparer prepares a request to retrieve the next set of results.
// It returns nil if no more results exist.
func (cjlr ClusterJobListResult) clusterJobListResultPreparer(ctx context.Context) (*http.Request, error) {
	if !cjlr.hasNextLink() {
		return nil, nil
	}
	return autorest.Prepare((&http.Request{}).WithContext(ctx),
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(to.String(cjlr.NextLink)))
}

// ClusterJobListResultPage contains a page of ClusterJob values.
type ClusterJobListResultPage struct {
	fn   func(context.Context, ClusterJobListResult) (ClusterJobListResult, error)
	cjlr ClusterJobListResult
}

// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *ClusterJobListResultPage) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/ClusterJobListResultPage.NextWithContext")
		defer func() {
			sc := -1
			if page.Response().Response.Response != nil {
				sc = page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Loop so that empty intermediate pages are skipped as long as a next link remains.
	for {
		next, err := page.fn(ctx, page.cjlr)
		if err != nil {
			return err
		}
		page.cjlr = next
		if !next.hasNextLink() || !next.IsEmpty() {
			break
		}
	}
	return nil
}

// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *ClusterJobListResultPage) Next() error {
	return page.NextWithContext(context.Background())
}

// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page ClusterJobListResultPage) NotDone() bool {
	return !page.cjlr.IsEmpty()
}

// Response returns the raw server response from the last page request.
func (page ClusterJobListResultPage) Response() ClusterJobListResult {
	return page.cjlr
}

// Values returns the slice of values for the current page or nil if there are no values.
func (page ClusterJobListResultPage) Values() []ClusterJob {
	if page.cjlr.IsEmpty() {
		return nil
	}
	return *page.cjlr.Value
}

// Creates a new instance of the ClusterJobListResultPage type.
func NewClusterJobListResultPage(getNextPage func(context.Context, ClusterJobListResult) (ClusterJobListResult, error)) ClusterJobListResultPage {
	return ClusterJobListResultPage{fn: getNextPage}
}

// ClusterListResult a list of clusters populated by a 'list' operation.
type ClusterListResult struct {
	autorest.Response `json:"-"`
	// Value - READ-ONLY; A list of clusters.
	Value *[]Cluster `json:"value,omitempty"`
	// NextLink - READ-ONLY; The URL to fetch the next set of clusters.
	NextLink *string `json:"nextLink,omitempty"`
}

// ClusterListResultIterator provides access to a complete listing of Cluster values.
type ClusterListResultIterator struct {
	i    int
	page ClusterListResultPage
}

// NextWithContext advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
func (iter *ClusterListResultIterator) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/ClusterListResultIterator.NextWithContext")
		defer func() {
			sc := -1
			if iter.Response().Response.Response != nil {
				sc = iter.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	iter.i++
	if iter.i < len(iter.page.Values()) {
		return nil
	}
	err = iter.page.NextWithContext(ctx)
	if err != nil {
		// Roll back the index so a failed page fetch leaves the iterator where it was.
		iter.i--
		return err
	}
	iter.i = 0
	return nil
}

// Next advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (iter *ClusterListResultIterator) Next() error {
	return iter.NextWithContext(context.Background())
}

// NotDone returns true if the enumeration should be started or is not yet complete.
func (iter ClusterListResultIterator) NotDone() bool {
	return iter.page.NotDone() && iter.i < len(iter.page.Values())
}

// Response returns the raw server response from the last page request.
func (iter ClusterListResultIterator) Response() ClusterListResult {
	return iter.page.Response()
}

// Value returns the current value or a zero-initialized value if the
// iterator has advanced beyond the end of the collection.
func (iter ClusterListResultIterator) Value() Cluster {
	if !iter.page.NotDone() {
		return Cluster{}
	}
	return iter.page.Values()[iter.i]
}

// Creates a new instance of the ClusterListResultIterator type.
func NewClusterListResultIterator(page ClusterListResultPage) ClusterListResultIterator {
	return ClusterListResultIterator{page: page}
}

// IsEmpty returns true if the ListResult contains no values.
func (clr ClusterListResult) IsEmpty() bool {
	return clr.Value == nil || len(*clr.Value) == 0
}

// hasNextLink returns true if the NextLink is not empty.
func (clr ClusterListResult) hasNextLink() bool {
	return clr.NextLink != nil && len(*clr.NextLink) != 0
}

// clusterListResultPreparer prepares a request to retrieve the next set of results.
// It returns nil if no more results exist.
func (clr ClusterListResult) clusterListResultPreparer(ctx context.Context) (*http.Request, error) {
	if !clr.hasNextLink() {
		return nil, nil
	}
	return autorest.Prepare((&http.Request{}).WithContext(ctx),
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(to.String(clr.NextLink)))
}

// ClusterListResultPage contains a page of Cluster values.
type ClusterListResultPage struct {
	fn  func(context.Context, ClusterListResult) (ClusterListResult, error)
	clr ClusterListResult
}

// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *ClusterListResultPage) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/ClusterListResultPage.NextWithContext")
		defer func() {
			sc := -1
			if page.Response().Response.Response != nil {
				sc = page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Loop so that empty intermediate pages are skipped as long as a next link remains.
	for {
		next, err := page.fn(ctx, page.clr)
		if err != nil {
			return err
		}
		page.clr = next
		if !next.hasNextLink() || !next.IsEmpty() {
			break
		}
	}
	return nil
}

// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *ClusterListResultPage) Next() error {
	return page.NextWithContext(context.Background())
}

// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page ClusterListResultPage) NotDone() bool {
	return !page.clr.IsEmpty()
}

// Response returns the raw server response from the last page request.
func (page ClusterListResultPage) Response() ClusterListResult {
	return page.clr
}

// Values returns the slice of values for the current page or nil if there are no values.
func (page ClusterListResultPage) Values() []Cluster {
	if page.clr.IsEmpty() {
		return nil
	}
	return *page.clr.Value
}

// Creates a new instance of the ClusterListResultPage type.
func NewClusterListResultPage(getNextPage func(context.Context, ClusterListResult) (ClusterListResult, error)) ClusterListResultPage {
	return ClusterListResultPage{fn: getNextPage}
}

// ClusterProperties the properties associated with a Stream Analytics cluster.
type ClusterProperties struct {
	// CreatedDate - READ-ONLY; The date this cluster was created.
	CreatedDate *date.Time `json:"createdDate,omitempty"`
	// ClusterID - READ-ONLY; Unique identifier for the cluster.
	ClusterID *string `json:"clusterId,omitempty"`
	// ProvisioningState - Possible values include: 'Succeeded', 'Failed', 'Canceled', 'InProgress'
	ProvisioningState ClusterProvisioningState `json:"provisioningState,omitempty"`
	// CapacityAllocated - READ-ONLY; Represents the number of streaming units currently being used on the cluster.
	CapacityAllocated *int32 `json:"capacityAllocated,omitempty"`
	// CapacityAssigned - READ-ONLY; Represents the sum of the SUs of all streaming jobs associated with the cluster. If all of the jobs were running, this would be the capacity allocated.
	CapacityAssigned *int32 `json:"capacityAssigned,omitempty"`
}

// MarshalJSON is the custom marshaler for ClusterProperties.
// Only ProvisioningState is written; the remaining fields are READ-ONLY and omitted.
func (cp ClusterProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if cp.ProvisioningState != "" {
		objectMap["provisioningState"] = cp.ProvisioningState
	}
	return json.Marshal(objectMap)
}

// ClustersCreateOrUpdateFuture an abstraction for monitoring and retrieving the results of a long-running
// operation.
type ClustersCreateOrUpdateFuture struct {
	azure.Future
}

// Result returns the result of the asynchronous operation.
// If the operation has not completed it will return an error.
func (future *ClustersCreateOrUpdateFuture) Result(client ClustersClient) (c Cluster, err error) {
	var done bool
	done, err = future.DoneWithContext(context.Background(), client)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.ClustersCreateOrUpdateFuture", "Result", future.Response(), "Polling failure")
		return
	}
	if !done {
		err = azure.NewAsyncOpIncompleteError("streamanalytics.ClustersCreateOrUpdateFuture")
		return
	}
	// Operation is done: fetch the final resource and decode it, unless the
	// service returned 204 No Content (nothing to decode).
	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
	if c.Response.Response, err = future.GetResult(sender); err == nil && c.Response.Response.StatusCode != http.StatusNoContent {
		c, err = client.CreateOrUpdateResponder(c.Response.Response)
		if err != nil {
			err = autorest.NewErrorWithError(err, "streamanalytics.ClustersCreateOrUpdateFuture", "Result", c.Response.Response, "Failure responding to request")
		}
	}
	return
}

// ClustersDeleteFuture an abstraction for monitoring and retrieving the results of a long-running operation.
type ClustersDeleteFuture struct {
	azure.Future
}

// Result returns the result of the asynchronous operation.
// If the operation has not completed it will return an error.
+func (future *ClustersDeleteFuture) Result(client ClustersClient) (ar autorest.Response, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.ClustersDeleteFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.ClustersDeleteFuture") + return + } + ar.Response = future.Response() + return +} + +// ClusterSku the SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT +// (CreateOrUpdate) requests. +type ClusterSku struct { + // Name - Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. Possible values include: 'Default' + Name ClusterSkuName `json:"name,omitempty"` + // Capacity - Denotes the number of streaming units the cluster can support. Valid values for this property are multiples of 36 with a minimum value of 36 and maximum value of 216. Required on PUT (CreateOrUpdate) requests. + Capacity *int32 `json:"capacity,omitempty"` +} + +// ClustersUpdateFuture an abstraction for monitoring and retrieving the results of a long-running operation. +type ClustersUpdateFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
func (future *ClustersUpdateFuture) Result(client ClustersClient) (c Cluster, err error) {
	var done bool
	done, err = future.DoneWithContext(context.Background(), client)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.ClustersUpdateFuture", "Result", future.Response(), "Polling failure")
		return
	}
	if !done {
		err = azure.NewAsyncOpIncompleteError("streamanalytics.ClustersUpdateFuture")
		return
	}
	// Polling finished; fetch the final Cluster resource, retrying transient
	// status codes with the client's configured retry policy. A 204 response
	// carries no body, so the responder is skipped in that case.
	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
	if c.Response.Response, err = future.GetResult(sender); err == nil && c.Response.Response.StatusCode != http.StatusNoContent {
		c, err = client.UpdateResponder(c.Response.Response)
		if err != nil {
			err = autorest.NewErrorWithError(err, "streamanalytics.ClustersUpdateFuture", "Result", c.Response.Response, "Failure responding to request")
		}
	}
	return
}

// Compression describes how input data is compressed
type Compression struct {
	// Type - The type of compression used by the input.
	Type *string `json:"type,omitempty"`
}

// CSharpFunctionBinding the binding to a CSharp function.
type CSharpFunctionBinding struct {
	// CSharpFunctionBindingProperties - The binding properties associated with a CSharp function.
	*CSharpFunctionBindingProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeFunctionBinding', 'TypeMicrosoftMachineLearningWebService', 'TypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeMicrosoftStreamAnalyticsCLRUdf', 'TypeMicrosoftMachineLearningServices'
	Type Type `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for CSharpFunctionBinding.
func (csfb CSharpFunctionBinding) MarshalJSON() ([]byte, error) {
	// Pin the discriminator so the payload always carries this concrete
	// type, regardless of what the caller set on the struct.
	csfb.Type = TypeMicrosoftStreamAnalyticsCLRUdf
	objectMap := make(map[string]interface{})
	if csfb.CSharpFunctionBindingProperties != nil {
		objectMap["properties"] = csfb.CSharpFunctionBindingProperties
	}
	if csfb.Type != "" {
		objectMap["type"] = csfb.Type
	}
	return json.Marshal(objectMap)
}

// AsAzureMachineLearningStudioFunctionBinding is the BasicFunctionBinding implementation for CSharpFunctionBinding.
func (csfb CSharpFunctionBinding) AsAzureMachineLearningStudioFunctionBinding() (*AzureMachineLearningStudioFunctionBinding, bool) {
	return nil, false
}

// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for CSharpFunctionBinding.
func (csfb CSharpFunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) {
	return nil, false
}

// AsCSharpFunctionBinding is the BasicFunctionBinding implementation for CSharpFunctionBinding.
func (csfb CSharpFunctionBinding) AsCSharpFunctionBinding() (*CSharpFunctionBinding, bool) {
	return &csfb, true
}

// AsAzureMachineLearningServiceFunctionBinding is the BasicFunctionBinding implementation for CSharpFunctionBinding.
func (csfb CSharpFunctionBinding) AsAzureMachineLearningServiceFunctionBinding() (*AzureMachineLearningServiceFunctionBinding, bool) {
	return nil, false
}

// AsFunctionBinding is the BasicFunctionBinding implementation for CSharpFunctionBinding.
func (csfb CSharpFunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) {
	return nil, false
}

// AsBasicFunctionBinding is the BasicFunctionBinding implementation for CSharpFunctionBinding.
func (csfb CSharpFunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) {
	return &csfb, true
}

// UnmarshalJSON is the custom unmarshaler for CSharpFunctionBinding struct.
func (csfb *CSharpFunctionBinding) UnmarshalJSON(body []byte) error {
	// Decode into RawMessage first so each known key can be unmarshaled into
	// its concrete type; unknown keys are silently ignored.
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var cSharpFunctionBindingProperties CSharpFunctionBindingProperties
				err = json.Unmarshal(*v, &cSharpFunctionBindingProperties)
				if err != nil {
					return err
				}
				csfb.CSharpFunctionBindingProperties = &cSharpFunctionBindingProperties
			}
		case "type":
			if v != nil {
				var typeVar Type
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				csfb.Type = typeVar
			}
		}
	}

	return nil
}

// CSharpFunctionBindingProperties the binding properties associated with a CSharp function.
type CSharpFunctionBindingProperties struct {
	// Script - The Csharp code containing a single function definition.
	Script *string `json:"script,omitempty"`
	// DllPath - The path of the DLL that contains the CSharp function. (NOTE(review): generated comment was copy-pasted from Script; wording inferred from the field name — confirm against the service spec.)
	DllPath *string `json:"dllPath,omitempty"`
	// Class - The name of the class containing the CSharp function. (NOTE(review): see DllPath note.)
	Class *string `json:"class,omitempty"`
	// Method - The name of the method implementing the CSharp function. (NOTE(review): see DllPath note.)
	Method *string `json:"method,omitempty"`
}

// CSharpFunctionBindingRetrievalProperties the binding retrieval properties associated with a CSharp function.
type CSharpFunctionBindingRetrievalProperties struct {
	// Script - The CSharp code containing a single function definition.
	Script *string `json:"script,omitempty"`
	// UdfType - The function type. Possible values include: 'Scalar'
	UdfType UdfType `json:"udfType,omitempty"`
}

// CSharpFunctionRetrieveDefaultDefinitionParameters the parameters needed to retrieve the default function
// definition for a CSharp function.
type CSharpFunctionRetrieveDefaultDefinitionParameters struct {
	// CSharpFunctionBindingRetrievalProperties - The binding retrieval properties associated with a CSharp function.
	*CSharpFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"`
	// BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftMachineLearningServices', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf', 'BindingTypeMicrosoftStreamAnalyticsCLRUdf'
	BindingType BindingType `json:"bindingType,omitempty"`
}

// MarshalJSON is the custom marshaler for CSharpFunctionRetrieveDefaultDefinitionParameters.
func (csfrddp CSharpFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) {
	// Pin the discriminator so the payload always carries this concrete type.
	csfrddp.BindingType = BindingTypeMicrosoftStreamAnalyticsCLRUdf
	objectMap := make(map[string]interface{})
	if csfrddp.CSharpFunctionBindingRetrievalProperties != nil {
		objectMap["bindingRetrievalProperties"] = csfrddp.CSharpFunctionBindingRetrievalProperties
	}
	if csfrddp.BindingType != "" {
		objectMap["bindingType"] = csfrddp.BindingType
	}
	return json.Marshal(objectMap)
}

// AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for CSharpFunctionRetrieveDefaultDefinitionParameters.
func (csfrddp CSharpFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for CSharpFunctionRetrieveDefaultDefinitionParameters.
func (csfrddp CSharpFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for CSharpFunctionRetrieveDefaultDefinitionParameters.
func (csfrddp CSharpFunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsCSharpFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for CSharpFunctionRetrieveDefaultDefinitionParameters.
func (csfrddp CSharpFunctionRetrieveDefaultDefinitionParameters) AsCSharpFunctionRetrieveDefaultDefinitionParameters() (*CSharpFunctionRetrieveDefaultDefinitionParameters, bool) {
	return &csfrddp, true
}

// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for CSharpFunctionRetrieveDefaultDefinitionParameters.
func (csfrddp CSharpFunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for CSharpFunctionRetrieveDefaultDefinitionParameters.
func (csfrddp CSharpFunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) {
	return &csfrddp, true
}

// UnmarshalJSON is the custom unmarshaler for CSharpFunctionRetrieveDefaultDefinitionParameters struct.
func (csfrddp *CSharpFunctionRetrieveDefaultDefinitionParameters) UnmarshalJSON(body []byte) error {
	// Decode into RawMessage first so each known key can be unmarshaled into
	// its concrete type; unknown keys are silently ignored.
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "bindingRetrievalProperties":
			if v != nil {
				var cSharpFunctionBindingRetrievalProperties CSharpFunctionBindingRetrievalProperties
				err = json.Unmarshal(*v, &cSharpFunctionBindingRetrievalProperties)
				if err != nil {
					return err
				}
				csfrddp.CSharpFunctionBindingRetrievalProperties = &cSharpFunctionBindingRetrievalProperties
			}
		case "bindingType":
			if v != nil {
				var bindingType BindingType
				err = json.Unmarshal(*v, &bindingType)
				if err != nil {
					return err
				}
				csfrddp.BindingType = bindingType
			}
		}
	}

	return nil
}

// CsvSerialization describes how data from an input is serialized or how data is serialized when written to an
// output in CSV format.
type CsvSerialization struct {
	// CsvSerializationProperties - The properties that are associated with the CSV serialization type. Required on PUT (CreateOrReplace) requests.
	*CsvSerializationProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeSerialization', 'TypeParquet', 'TypeCustomClr', 'TypeCsv', 'TypeJSON', 'TypeAvro'
	Type TypeBasicSerialization `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for CsvSerialization.
func (cs CsvSerialization) MarshalJSON() ([]byte, error) {
	// Pin the discriminator so the payload always carries this concrete type.
	cs.Type = TypeCsv
	objectMap := make(map[string]interface{})
	if cs.CsvSerializationProperties != nil {
		objectMap["properties"] = cs.CsvSerializationProperties
	}
	if cs.Type != "" {
		objectMap["type"] = cs.Type
	}
	return json.Marshal(objectMap)
}

// AsParquetSerialization is the BasicSerialization implementation for CsvSerialization.
+func (cs CsvSerialization) AsParquetSerialization() (*ParquetSerialization, bool) { + return nil, false +} + +// AsCustomClrSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsCustomClrSerialization() (*CustomClrSerialization, bool) { + return nil, false +} + +// AsCsvSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsCsvSerialization() (*CsvSerialization, bool) { + return &cs, true +} + +// AsJSONSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsJSONSerialization() (*JSONSerialization, bool) { + return nil, false +} + +// AsAvroSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsAvroSerialization() (*AvroSerialization, bool) { + return nil, false +} + +// AsSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsSerialization() (*Serialization, bool) { + return nil, false +} + +// AsBasicSerialization is the BasicSerialization implementation for CsvSerialization. +func (cs CsvSerialization) AsBasicSerialization() (BasicSerialization, bool) { + return &cs, true +} + +// UnmarshalJSON is the custom unmarshaler for CsvSerialization struct. 
func (cs *CsvSerialization) UnmarshalJSON(body []byte) error {
	// Decode into RawMessage first so each known key can be unmarshaled into
	// its concrete type; unknown keys are silently ignored.
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var csvSerializationProperties CsvSerializationProperties
				err = json.Unmarshal(*v, &csvSerializationProperties)
				if err != nil {
					return err
				}
				cs.CsvSerializationProperties = &csvSerializationProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicSerialization
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				cs.Type = typeVar
			}
		}
	}

	return nil
}

// CsvSerializationProperties the properties that are associated with the CSV serialization type.
type CsvSerializationProperties struct {
	// FieldDelimiter - Specifies the delimiter that will be used to separate comma-separated value (CSV) records. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list of supported values. Required on PUT (CreateOrReplace) requests.
	FieldDelimiter *string `json:"fieldDelimiter,omitempty"`
	// Encoding - Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. Possible values include: 'UTF8'
	Encoding Encoding `json:"encoding,omitempty"`
}

// CustomClrSerialization describes how data from an input is serialized or how data is serialized when written
// to an output in custom format.
type CustomClrSerialization struct {
	// CustomClrSerializationProperties - The properties that are associated with the CustomClr serialization type. Required on PUT (CreateOrReplace) requests.
	*CustomClrSerializationProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeSerialization', 'TypeParquet', 'TypeCustomClr', 'TypeCsv', 'TypeJSON', 'TypeAvro'
	Type TypeBasicSerialization `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for CustomClrSerialization.
func (ccs CustomClrSerialization) MarshalJSON() ([]byte, error) {
	// Pin the discriminator so the payload always carries this concrete type.
	ccs.Type = TypeCustomClr
	objectMap := make(map[string]interface{})
	if ccs.CustomClrSerializationProperties != nil {
		objectMap["properties"] = ccs.CustomClrSerializationProperties
	}
	if ccs.Type != "" {
		objectMap["type"] = ccs.Type
	}
	return json.Marshal(objectMap)
}

// AsParquetSerialization is the BasicSerialization implementation for CustomClrSerialization.
func (ccs CustomClrSerialization) AsParquetSerialization() (*ParquetSerialization, bool) {
	return nil, false
}

// AsCustomClrSerialization is the BasicSerialization implementation for CustomClrSerialization.
func (ccs CustomClrSerialization) AsCustomClrSerialization() (*CustomClrSerialization, bool) {
	return &ccs, true
}

// AsCsvSerialization is the BasicSerialization implementation for CustomClrSerialization.
func (ccs CustomClrSerialization) AsCsvSerialization() (*CsvSerialization, bool) {
	return nil, false
}

// AsJSONSerialization is the BasicSerialization implementation for CustomClrSerialization.
func (ccs CustomClrSerialization) AsJSONSerialization() (*JSONSerialization, bool) {
	return nil, false
}

// AsAvroSerialization is the BasicSerialization implementation for CustomClrSerialization.
func (ccs CustomClrSerialization) AsAvroSerialization() (*AvroSerialization, bool) {
	return nil, false
}

// AsSerialization is the BasicSerialization implementation for CustomClrSerialization.
func (ccs CustomClrSerialization) AsSerialization() (*Serialization, bool) {
	return nil, false
}

// AsBasicSerialization is the BasicSerialization implementation for CustomClrSerialization.
func (ccs CustomClrSerialization) AsBasicSerialization() (BasicSerialization, bool) {
	return &ccs, true
}

// UnmarshalJSON is the custom unmarshaler for CustomClrSerialization struct.
func (ccs *CustomClrSerialization) UnmarshalJSON(body []byte) error {
	// Decode into RawMessage first so each known key can be unmarshaled into
	// its concrete type; unknown keys are silently ignored.
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var customClrSerializationProperties CustomClrSerializationProperties
				err = json.Unmarshal(*v, &customClrSerializationProperties)
				if err != nil {
					return err
				}
				ccs.CustomClrSerializationProperties = &customClrSerializationProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicSerialization
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				ccs.Type = typeVar
			}
		}
	}

	return nil
}

// CustomClrSerializationProperties the properties that are associated with the CustomClr serialization type.
type CustomClrSerializationProperties struct {
	// SerializationDllPath - The serialization library path.
	SerializationDllPath *string `json:"serializationDllPath,omitempty"`
	// SerializationClassName - The serialization class name.
	SerializationClassName *string `json:"serializationClassName,omitempty"`
}

// DiagnosticCondition condition applicable to the resource, or to the job overall, that warrant customer
// attention.
type DiagnosticCondition struct {
	// Since - READ-ONLY; The UTC timestamp of when the condition started. Customers should be able to find a corresponding event in the ops log around this time.
	Since *string `json:"since,omitempty"`
	// Code - READ-ONLY; The opaque diagnostic code.
	Code *string `json:"code,omitempty"`
	// Message - READ-ONLY; The human-readable message describing the condition in detail. Localized in the Accept-Language of the client request.
	Message *string `json:"message,omitempty"`
}

// Diagnostics describes conditions applicable to the Input, Output, or the job overall, that warrant customer
// attention.
type Diagnostics struct {
	// Conditions - READ-ONLY; A collection of zero or more conditions applicable to the resource, or to the job overall, that warrant customer attention.
	Conditions *[]DiagnosticCondition `json:"conditions,omitempty"`
}

// DocumentDbOutputDataSource describes a DocumentDB output data source.
type DocumentDbOutputDataSource struct {
	// DocumentDbOutputDataSourceProperties - The properties that are associated with a DocumentDB output. Required on PUT (CreateOrReplace) requests.
	*DocumentDbOutputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for DocumentDbOutputDataSource.
func (ddods DocumentDbOutputDataSource) MarshalJSON() ([]byte, error) {
	// Pin the discriminator so the payload always carries this concrete type.
	ddods.Type = TypeMicrosoftStorageDocumentDB
	objectMap := make(map[string]interface{})
	if ddods.DocumentDbOutputDataSourceProperties != nil {
		objectMap["properties"] = ddods.DocumentDbOutputDataSourceProperties
	}
	if ddods.Type != "" {
		objectMap["type"] = ddods.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource.
+func (ddods DocumentDbOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return nil, false +} + +// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return &ddods, true +} + +// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. 
+func (ddods DocumentDbOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. +func (ddods DocumentDbOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &ddods, true +} + +// UnmarshalJSON is the custom unmarshaler for DocumentDbOutputDataSource struct. 
func (ddods *DocumentDbOutputDataSource) UnmarshalJSON(body []byte) error {
	// Decode into RawMessage first so each known key can be unmarshaled into
	// its concrete type; unknown keys are silently ignored.
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var documentDbOutputDataSourceProperties DocumentDbOutputDataSourceProperties
				err = json.Unmarshal(*v, &documentDbOutputDataSourceProperties)
				if err != nil {
					return err
				}
				ddods.DocumentDbOutputDataSourceProperties = &documentDbOutputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicOutputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				ddods.Type = typeVar
			}
		}
	}

	return nil
}

// DocumentDbOutputDataSourceProperties the properties that are associated with a DocumentDB output.
type DocumentDbOutputDataSourceProperties struct {
	// AccountID - The DocumentDB account name or ID. Required on PUT (CreateOrReplace) requests.
	AccountID *string `json:"accountId,omitempty"`
	// AccountKey - The account key for the DocumentDB account. Required on PUT (CreateOrReplace) requests.
	AccountKey *string `json:"accountKey,omitempty"`
	// Database - The name of the DocumentDB database. Required on PUT (CreateOrReplace) requests.
	Database *string `json:"database,omitempty"`
	// CollectionNamePattern - The collection name pattern for the collections to be used. The collection name format can be constructed using the optional {partition} token, where partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT (CreateOrReplace) requests.
	CollectionNamePattern *string `json:"collectionNamePattern,omitempty"`
	// PartitionKey - The name of the field in output events used to specify the key for partitioning output across collections. If 'collectionNamePattern' contains the {partition} token, this property is required to be specified.
	PartitionKey *string `json:"partitionKey,omitempty"`
	// DocumentID - The name of the field in output events used to specify the primary key which insert or update operations are based on.
	DocumentID *string `json:"documentId,omitempty"`
}

// Error common error representation.
type Error struct {
	// Error - Error definition properties.
	Error *ErrorError `json:"error,omitempty"`
}

// ErrorDetails common error details representation.
type ErrorDetails struct {
	// Code - Error code.
	Code *string `json:"code,omitempty"`
	// Target - Error target.
	Target *string `json:"target,omitempty"`
	// Message - Error message.
	Message *string `json:"message,omitempty"`
}

// ErrorError error definition properties.
type ErrorError struct {
	// Code - Error code.
	Code *string `json:"code,omitempty"`
	// Message - Error message.
	Message *string `json:"message,omitempty"`
	// Target - Error target.
	Target *string `json:"target,omitempty"`
	// Details - Error details.
	Details *[]ErrorDetails `json:"details,omitempty"`
}

// ErrorResponse describes the error that occurred.
type ErrorResponse struct {
	// Code - READ-ONLY; Error code associated with the error that occurred.
	Code *string `json:"code,omitempty"`
	// Message - READ-ONLY; Describes the error in detail.
	Message *string `json:"message,omitempty"`
}

// EventHubDataSourceProperties the common properties that are associated with Event Hub data sources.
type EventHubDataSourceProperties struct {
	// EventHubName - The name of the Event Hub. Required on PUT (CreateOrReplace) requests.
	EventHubName *string `json:"eventHubName,omitempty"`
	// ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"`
	// SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"`
	// SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"`
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
}

// EventHubOutputDataSource describes an Event Hub output data source.
type EventHubOutputDataSource struct {
	// EventHubOutputDataSourceProperties - The properties that are associated with an Event Hub output. Required on PUT (CreateOrReplace) requests.
	*EventHubOutputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for EventHubOutputDataSource.
+func (ehods EventHubOutputDataSource) MarshalJSON() ([]byte, error) { + ehods.Type = TypeMicrosoftServiceBusEventHub + objectMap := make(map[string]interface{}) + if ehods.EventHubOutputDataSourceProperties != nil { + objectMap["properties"] = ehods.EventHubOutputDataSourceProperties + } + if ehods.Type != "" { + objectMap["type"] = ehods.Type + } + return json.Marshal(objectMap) +} + +// AsBlobOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { + return nil, false +} + +// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { + return nil, false +} + +// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { + return &ehods, true +} + +// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { + return nil, false +} + +// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { + return nil, false +} + +// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { + return nil, false +} + +// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. 
+func (ehods EventHubOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { + return nil, false +} + +// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { + return nil, false +} + +// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { + return nil, false +} + +// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { + return nil, false +} + +// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { + return nil, false +} + +// AsOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { + return nil, false +} + +// AsBasicOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. +func (ehods EventHubOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { + return &ehods, true +} + +// UnmarshalJSON is the custom unmarshaler for EventHubOutputDataSource struct. 
func (ehods *EventHubOutputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	// Decode each recognized key individually so the embedded properties
	// pointer is only allocated when "properties" is actually present in the
	// payload; unknown keys are silently ignored.
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var eventHubOutputDataSourceProperties EventHubOutputDataSourceProperties
				err = json.Unmarshal(*v, &eventHubOutputDataSourceProperties)
				if err != nil {
					return err
				}
				ehods.EventHubOutputDataSourceProperties = &eventHubOutputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicOutputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				ehods.Type = typeVar
			}
		}
	}

	return nil
}

// EventHubOutputDataSourceProperties the properties that are associated with an Event Hub output.
type EventHubOutputDataSourceProperties struct {
	// PartitionKey - The key/column that is used to determine to which partition to send event data.
	PartitionKey *string `json:"partitionKey,omitempty"`
	// PropertyColumns - NOTE(review): undocumented in the generated model; presumably the output columns to attach as Event Hub user properties — confirm against the service spec.
	PropertyColumns *[]string `json:"propertyColumns,omitempty"`
	// EventHubName - The name of the Event Hub. Required on PUT (CreateOrReplace) requests.
	EventHubName *string `json:"eventHubName,omitempty"`
	// ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"`
	// SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"`
	// SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"`
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
}

// EventHubStreamInputDataSource describes an Event Hub input data source that contains stream data.
type EventHubStreamInputDataSource struct {
	// EventHubStreamInputDataSourceProperties - The properties that are associated with an Event Hub input containing stream data. Required on PUT (CreateOrReplace) requests.
	*EventHubStreamInputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs'
	Type TypeBasicStreamInputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for EventHubStreamInputDataSource.
// The Type discriminator is pinned to this type's tag before serializing.
func (ehsids EventHubStreamInputDataSource) MarshalJSON() ([]byte, error) {
	ehsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub
	objectMap := make(map[string]interface{})
	if ehsids.EventHubStreamInputDataSourceProperties != nil {
		objectMap["properties"] = ehsids.EventHubStreamInputDataSourceProperties
	}
	if ehsids.Type != "" {
		objectMap["type"] = ehsids.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource.
func (ehsids EventHubStreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) {
	return nil, false
}

// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource.
func (ehsids EventHubStreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) {
	return &ehsids, true
}

// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource.
func (ehsids EventHubStreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) {
	return nil, false
}

// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource.
func (ehsids EventHubStreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) {
	return nil, false
}

// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource.
func (ehsids EventHubStreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) {
	return nil, false
}

// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource.
func (ehsids EventHubStreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) {
	return &ehsids, true
}

// UnmarshalJSON is the custom unmarshaler for EventHubStreamInputDataSource struct.
// Decodes "properties" and "type" individually; the embedded properties pointer
// is only allocated when the key is present, and unknown keys are ignored.
func (ehsids *EventHubStreamInputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var eventHubStreamInputDataSourceProperties EventHubStreamInputDataSourceProperties
				err = json.Unmarshal(*v, &eventHubStreamInputDataSourceProperties)
				if err != nil {
					return err
				}
				ehsids.EventHubStreamInputDataSourceProperties = &eventHubStreamInputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicStreamInputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				ehsids.Type = typeVar
			}
		}
	}

	return nil
}

// EventHubStreamInputDataSourceProperties the properties that are associated with a Event Hub input containing
// stream data.
type EventHubStreamInputDataSourceProperties struct {
	// ConsumerGroupName - The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group.
	ConsumerGroupName *string `json:"consumerGroupName,omitempty"`
	// EventHubName - The name of the Event Hub. Required on PUT (CreateOrReplace) requests.
	EventHubName *string `json:"eventHubName,omitempty"`
	// ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"`
	// SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"`
	// SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"`
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
}

// EventHubV2OutputDataSource describes an Event Hub output data source.
// It reuses EventHubOutputDataSourceProperties; only the Type discriminator
// differs from EventHubOutputDataSource.
type EventHubV2OutputDataSource struct {
	// EventHubOutputDataSourceProperties - The properties that are associated with an Event Hub output. Required on PUT (CreateOrReplace) requests.
	*EventHubOutputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for EventHubV2OutputDataSource.
// The Type discriminator is pinned to TypeMicrosoftEventHubEventHub before
// serializing.
func (ehvods EventHubV2OutputDataSource) MarshalJSON() ([]byte, error) {
	ehvods.Type = TypeMicrosoftEventHubEventHub
	objectMap := make(map[string]interface{})
	if ehvods.EventHubOutputDataSourceProperties != nil {
		objectMap["properties"] = ehvods.EventHubOutputDataSourceProperties
	}
	if ehvods.Type != "" {
		objectMap["type"] = ehvods.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
	return nil, false
}

// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
	return nil, false
}

// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
	return nil, false
}

// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
	return &ehvods, true
}

// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
	return nil, false
}

// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
	return nil, false
}

// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
	return nil, false
}

// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
	return nil, false
}

// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
	return nil, false
}

// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
	return nil, false
}

// AsOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
	return nil, false
}

// AsBasicOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource.
func (ehvods EventHubV2OutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
	return &ehvods, true
}

// UnmarshalJSON is the custom unmarshaler for EventHubV2OutputDataSource struct.
func (ehvods *EventHubV2OutputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	// Decode "properties" and "type" individually; unknown keys are ignored.
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var eventHubOutputDataSourceProperties EventHubOutputDataSourceProperties
				err = json.Unmarshal(*v, &eventHubOutputDataSourceProperties)
				if err != nil {
					return err
				}
				ehvods.EventHubOutputDataSourceProperties = &eventHubOutputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicOutputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				ehvods.Type = typeVar
			}
		}
	}

	return nil
}

// EventHubV2StreamInputDataSource describes an Event Hub input data source that contains stream data.
// It reuses EventHubStreamInputDataSourceProperties; only the Type discriminator
// differs from EventHubStreamInputDataSource.
type EventHubV2StreamInputDataSource struct {
	// EventHubStreamInputDataSourceProperties - The properties that are associated with an Event Hub input containing stream data. Required on PUT (CreateOrReplace) requests.
	*EventHubStreamInputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs'
	Type TypeBasicStreamInputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for EventHubV2StreamInputDataSource.
// The Type discriminator is pinned to this type's tag before serializing.
func (ehvsids EventHubV2StreamInputDataSource) MarshalJSON() ([]byte, error) {
	ehvsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub
	objectMap := make(map[string]interface{})
	if ehvsids.EventHubStreamInputDataSourceProperties != nil {
		objectMap["properties"] = ehvsids.EventHubStreamInputDataSourceProperties
	}
	if ehvsids.Type != "" {
		objectMap["type"] = ehvsids.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource.
func (ehvsids EventHubV2StreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) {
	return nil, false
}

// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource.
func (ehvsids EventHubV2StreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) {
	return nil, false
}

// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource.
func (ehvsids EventHubV2StreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) {
	return &ehvsids, true
}

// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource.
func (ehvsids EventHubV2StreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) {
	return nil, false
}

// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource.
func (ehvsids EventHubV2StreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) {
	return nil, false
}

// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource.
func (ehvsids EventHubV2StreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) {
	return &ehvsids, true
}

// UnmarshalJSON is the custom unmarshaler for EventHubV2StreamInputDataSource struct.
func (ehvsids *EventHubV2StreamInputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var eventHubStreamInputDataSourceProperties EventHubStreamInputDataSourceProperties
				err = json.Unmarshal(*v, &eventHubStreamInputDataSourceProperties)
				if err != nil {
					return err
				}
				ehvsids.EventHubStreamInputDataSourceProperties = &eventHubStreamInputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicStreamInputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				ehvsids.Type = typeVar
			}
		}
	}

	return nil
}

// External the storage account where the custom code artifacts are located.
type External struct {
	StorageAccount *StorageAccount `json:"storageAccount,omitempty"`
	Container      *string         `json:"container,omitempty"`
	Path           *string         `json:"path,omitempty"`
}

// Function a function object, containing all information associated with the named function. All functions are
// contained under a streaming job.
type Function struct {
	autorest.Response `json:"-"`
	// Properties - The properties that are associated with a function.
	Properties BasicFunctionProperties `json:"properties,omitempty"`
	// ID - READ-ONLY; Resource Id
	ID *string `json:"id,omitempty"`
	// Name - Resource name
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; Resource type
	Type *string `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for Function.
// The READ-ONLY fields (ID, Type) are deliberately omitted from the payload;
// only the writable Properties and Name are sent to the service.
func (f Function) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	objectMap["properties"] = f.Properties
	if f.Name != nil {
		objectMap["name"] = f.Name
	}
	return json.Marshal(objectMap)
}

// UnmarshalJSON is the custom unmarshaler for Function struct.
// "properties" is polymorphic (BasicFunctionProperties), so it is routed
// through unmarshalBasicFunctionProperties to pick the concrete type from the
// embedded discriminator.
func (f *Function) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				properties, err := unmarshalBasicFunctionProperties(*v)
				if err != nil {
					return err
				}
				f.Properties = properties
			}
		case "id":
			if v != nil {
				var ID string
				err = json.Unmarshal(*v, &ID)
				if err != nil {
					return err
				}
				f.ID = &ID
			}
		case "name":
			if v != nil {
				var name string
				err = json.Unmarshal(*v, &name)
				if err != nil {
					return err
				}
				f.Name = &name
			}
		case "type":
			if v != nil {
				var typeVar string
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				f.Type = &typeVar
			}
		}
	}

	return nil
}

// BasicFunctionBinding the physical binding of the function. For example, in the Azure Machine Learning web service’s
// case, this describes the endpoint.
type BasicFunctionBinding interface {
	AsAzureMachineLearningStudioFunctionBinding() (*AzureMachineLearningStudioFunctionBinding, bool)
	AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool)
	AsCSharpFunctionBinding() (*CSharpFunctionBinding, bool)
	AsAzureMachineLearningServiceFunctionBinding() (*AzureMachineLearningServiceFunctionBinding, bool)
	AsFunctionBinding() (*FunctionBinding, bool)
}

// FunctionBinding the physical binding of the function. For example, in the Azure Machine Learning web
// service’s case, this describes the endpoint.
type FunctionBinding struct {
	// Type - Possible values include: 'TypeFunctionBinding', 'TypeMicrosoftMachineLearningWebService', 'TypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeMicrosoftStreamAnalyticsCLRUdf', 'TypeMicrosoftMachineLearningServices'
	Type Type `json:"type,omitempty"`
}

// unmarshalBasicFunctionBinding decodes a polymorphic function binding by
// peeking at the "type" discriminator and unmarshaling into the matching
// concrete type; unrecognized (or absent) discriminators fall back to the
// base FunctionBinding.
func unmarshalBasicFunctionBinding(body []byte) (BasicFunctionBinding, error) {
	var m map[string]interface{}
	err := json.Unmarshal(body, &m)
	if err != nil {
		return nil, err
	}

	switch m["type"] {
	case string(TypeMicrosoftMachineLearningWebService):
		var amlsfb AzureMachineLearningStudioFunctionBinding
		err := json.Unmarshal(body, &amlsfb)
		return amlsfb, err
	case string(TypeMicrosoftStreamAnalyticsJavascriptUdf):
		var jsfb JavaScriptFunctionBinding
		err := json.Unmarshal(body, &jsfb)
		return jsfb, err
	case string(TypeMicrosoftStreamAnalyticsCLRUdf):
		var csfb CSharpFunctionBinding
		err := json.Unmarshal(body, &csfb)
		return csfb, err
	case string(TypeMicrosoftMachineLearningServices):
		var amlsfb AzureMachineLearningServiceFunctionBinding
		err := json.Unmarshal(body, &amlsfb)
		return amlsfb, err
	default:
		var fb FunctionBinding
		err := json.Unmarshal(body, &fb)
		return fb, err
	}
}

// unmarshalBasicFunctionBindingArray applies unmarshalBasicFunctionBinding to
// each element of a JSON array.
func unmarshalBasicFunctionBindingArray(body []byte) ([]BasicFunctionBinding, error) {
	var rawMessages []*json.RawMessage
	err := json.Unmarshal(body, &rawMessages)
	if err != nil {
		return nil, err
	}

	fbArray := make([]BasicFunctionBinding, len(rawMessages))

	for index, rawMessage := range rawMessages {
		fb, err := unmarshalBasicFunctionBinding(*rawMessage)
		if err != nil {
			return nil, err
		}
		fbArray[index] = fb
	}
	return fbArray, nil
}

// MarshalJSON is the custom marshaler for FunctionBinding.
// The Type discriminator is pinned to TypeFunctionBinding before serializing.
func (fb FunctionBinding) MarshalJSON() ([]byte, error) {
	fb.Type = TypeFunctionBinding
	objectMap := make(map[string]interface{})
	if fb.Type != "" {
		objectMap["type"] = fb.Type
	}
	return json.Marshal(objectMap)
}

// AsAzureMachineLearningStudioFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding.
func (fb FunctionBinding) AsAzureMachineLearningStudioFunctionBinding() (*AzureMachineLearningStudioFunctionBinding, bool) {
	return nil, false
}

// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding.
func (fb FunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) {
	return nil, false
}

// AsCSharpFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding.
func (fb FunctionBinding) AsCSharpFunctionBinding() (*CSharpFunctionBinding, bool) {
	return nil, false
}

// AsAzureMachineLearningServiceFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding.
func (fb FunctionBinding) AsAzureMachineLearningServiceFunctionBinding() (*AzureMachineLearningServiceFunctionBinding, bool) {
	return nil, false
}

// AsFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding.
func (fb FunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) {
	return &fb, true
}

// AsBasicFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding.
func (fb FunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) {
	return &fb, true
}

// FunctionConfiguration ...
type FunctionConfiguration struct {
	Inputs  *[]FunctionInput     `json:"inputs,omitempty"`
	Output  *FunctionOutput      `json:"output,omitempty"`
	Binding BasicFunctionBinding `json:"binding,omitempty"`
}

// UnmarshalJSON is the custom unmarshaler for FunctionConfiguration struct.
// The "binding" key is polymorphic, so it is routed through
// unmarshalBasicFunctionBinding to pick the concrete type.
func (fc *FunctionConfiguration) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "inputs":
			if v != nil {
				var inputs []FunctionInput
				err = json.Unmarshal(*v, &inputs)
				if err != nil {
					return err
				}
				fc.Inputs = &inputs
			}
		case "output":
			if v != nil {
				var output FunctionOutput
				err = json.Unmarshal(*v, &output)
				if err != nil {
					return err
				}
				fc.Output = &output
			}
		case "binding":
			if v != nil {
				binding, err := unmarshalBasicFunctionBinding(*v)
				if err != nil {
					return err
				}
				fc.Binding = binding
			}
		}
	}

	return nil
}

// FunctionInput describes one input parameter of a function.
type FunctionInput struct {
	// DataType - The (Azure Stream Analytics supported) data type of the function input parameter. A list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx
	DataType *string `json:"dataType,omitempty"`
	// IsConfigurationParameter - A flag indicating if the parameter is a configuration parameter. True if this input parameter is expected to be a constant. Default is false.
	IsConfigurationParameter *bool `json:"isConfigurationParameter,omitempty"`
}

// FunctionListResult object containing a list of functions under a streaming job.
type FunctionListResult struct {
	autorest.Response `json:"-"`
	// Value - READ-ONLY; A list of functions under a streaming job. Populated by a 'List' operation.
	Value *[]Function `json:"value,omitempty"`
	// NextLink - READ-ONLY; The link (url) to the next page of results.
	NextLink *string `json:"nextLink,omitempty"`
}

// FunctionListResultIterator provides access to a complete listing of Function values.
type FunctionListResultIterator struct {
	i    int
	page FunctionListResultPage
}

// NextWithContext advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
func (iter *FunctionListResultIterator) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/FunctionListResultIterator.NextWithContext")
		defer func() {
			sc := -1
			if iter.Response().Response.Response != nil {
				sc = iter.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Advance within the current page first; only fetch the next page when the
	// in-page index runs off the end. On fetch failure the index is rolled
	// back so the iterator does not advance.
	iter.i++
	if iter.i < len(iter.page.Values()) {
		return nil
	}
	err = iter.page.NextWithContext(ctx)
	if err != nil {
		iter.i--
		return err
	}
	iter.i = 0
	return nil
}

// Next advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (iter *FunctionListResultIterator) Next() error {
	return iter.NextWithContext(context.Background())
}

// NotDone returns true if the enumeration should be started or is not yet complete.
func (iter FunctionListResultIterator) NotDone() bool {
	return iter.page.NotDone() && iter.i < len(iter.page.Values())
}

// Response returns the raw server response from the last page request.
func (iter FunctionListResultIterator) Response() FunctionListResult {
	return iter.page.Response()
}

// Value returns the current value or a zero-initialized value if the
// iterator has advanced beyond the end of the collection.
func (iter FunctionListResultIterator) Value() Function {
	if !iter.page.NotDone() {
		return Function{}
	}
	return iter.page.Values()[iter.i]
}

// Creates a new instance of the FunctionListResultIterator type.
func NewFunctionListResultIterator(page FunctionListResultPage) FunctionListResultIterator {
	return FunctionListResultIterator{page: page}
}

// IsEmpty returns true if the ListResult contains no values.
func (flr FunctionListResult) IsEmpty() bool {
	return flr.Value == nil || len(*flr.Value) == 0
}

// hasNextLink returns true if the NextLink is not empty.
func (flr FunctionListResult) hasNextLink() bool {
	return flr.NextLink != nil && len(*flr.NextLink) != 0
}

// functionListResultPreparer prepares a request to retrieve the next set of results.
// It returns nil if no more results exist.
func (flr FunctionListResult) functionListResultPreparer(ctx context.Context) (*http.Request, error) {
	if !flr.hasNextLink() {
		return nil, nil
	}
	return autorest.Prepare((&http.Request{}).WithContext(ctx),
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(to.String(flr.NextLink)))
}

// FunctionListResultPage contains a page of Function values.
type FunctionListResultPage struct {
	fn  func(context.Context, FunctionListResult) (FunctionListResult, error)
	flr FunctionListResult
}

// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *FunctionListResultPage) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/FunctionListResultPage.NextWithContext")
		defer func() {
			sc := -1
			if page.Response().Response.Response != nil {
				sc = page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Loop to skip over any empty intermediate pages the service may return:
	// keep fetching while a page is empty but still advertises a next link.
	for {
		next, err := page.fn(ctx, page.flr)
		if err != nil {
			return err
		}
		page.flr = next
		if !next.hasNextLink() || !next.IsEmpty() {
			break
		}
	}
	return nil
}

// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *FunctionListResultPage) Next() error {
	return page.NextWithContext(context.Background())
}

// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page FunctionListResultPage) NotDone() bool {
	return !page.flr.IsEmpty()
}

// Response returns the raw server response from the last page request.
func (page FunctionListResultPage) Response() FunctionListResult {
	return page.flr
}

// Values returns the slice of values for the current page or nil if there are no values.
func (page FunctionListResultPage) Values() []Function {
	if page.flr.IsEmpty() {
		return nil
	}
	return *page.flr.Value
}

// Creates a new instance of the FunctionListResultPage type.
func NewFunctionListResultPage(getNextPage func(context.Context, FunctionListResult) (FunctionListResult, error)) FunctionListResultPage {
	return FunctionListResultPage{fn: getNextPage}
}

// FunctionOutput describes the output of a function.
type FunctionOutput struct {
	// DataType - The (Azure Stream Analytics supported) data type of the function output. A list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx
	DataType *string `json:"dataType,omitempty"`
}

// BasicFunctionProperties the properties that are associated with a function.
type BasicFunctionProperties interface {
	AsScalarFunctionProperties() (*ScalarFunctionProperties, bool)
	AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool)
	AsFunctionProperties() (*FunctionProperties, bool)
}

// FunctionProperties the properties that are associated with a function.
type FunctionProperties struct {
	// Etag - READ-ONLY; The current entity tag for the function. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency.
+	Etag *string `json:"etag,omitempty"`
+	*FunctionConfiguration `json:"properties,omitempty"`
+	// Type - Possible values include: 'TypeFunctionProperties', 'TypeScalar', 'TypeAggregate'
+	Type TypeBasicFunctionProperties `json:"type,omitempty"`
+}
+
+// unmarshalBasicFunctionProperties selects the concrete implementation of
+// BasicFunctionProperties from the "type" discriminator field and unmarshals into it.
+func unmarshalBasicFunctionProperties(body []byte) (BasicFunctionProperties, error) {
+	var m map[string]interface{}
+	err := json.Unmarshal(body, &m)
+	if err != nil {
+		return nil, err
+	}
+
+	switch m["type"] {
+	case string(TypeScalar):
+		var sfp ScalarFunctionProperties
+		err := json.Unmarshal(body, &sfp)
+		return sfp, err
+	case string(TypeAggregate):
+		var afp AggregateFunctionProperties
+		err := json.Unmarshal(body, &afp)
+		return afp, err
+	default:
+		var fp FunctionProperties
+		err := json.Unmarshal(body, &fp)
+		return fp, err
+	}
+}
+// unmarshalBasicFunctionPropertiesArray applies unmarshalBasicFunctionProperties
+// to every element of a JSON array.
+func unmarshalBasicFunctionPropertiesArray(body []byte) ([]BasicFunctionProperties, error) {
+	var rawMessages []*json.RawMessage
+	err := json.Unmarshal(body, &rawMessages)
+	if err != nil {
+		return nil, err
+	}
+
+	fpArray := make([]BasicFunctionProperties, len(rawMessages))
+
+	for index, rawMessage := range rawMessages {
+		fp, err := unmarshalBasicFunctionProperties(*rawMessage)
+		if err != nil {
+			return nil, err
+		}
+		fpArray[index] = fp
+	}
+	return fpArray, nil
+}
+
+// MarshalJSON is the custom marshaler for FunctionProperties.
+func (fp FunctionProperties) MarshalJSON() ([]byte, error) {
+	// Force the discriminator so the wire form always identifies this concrete type.
+	fp.Type = TypeFunctionProperties
+	objectMap := make(map[string]interface{})
+	if fp.FunctionConfiguration != nil {
+		objectMap["properties"] = fp.FunctionConfiguration
+	}
+	if fp.Type != "" {
+		objectMap["type"] = fp.Type
+	}
+	return json.Marshal(objectMap)
+}
+
+// AsScalarFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties.
+func (fp FunctionProperties) AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) {
+	return nil, false
+}
+
+// AsAggregateFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties.
+func (fp FunctionProperties) AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) {
+	return nil, false
+}
+
+// AsFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties.
+func (fp FunctionProperties) AsFunctionProperties() (*FunctionProperties, bool) {
+	return &fp, true
+}
+
+// AsBasicFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties.
+func (fp FunctionProperties) AsBasicFunctionProperties() (BasicFunctionProperties, bool) {
+	return &fp, true
+}
+
+// UnmarshalJSON is the custom unmarshaler for FunctionProperties struct.
+func (fp *FunctionProperties) UnmarshalJSON(body []byte) error {
+	var m map[string]*json.RawMessage
+	err := json.Unmarshal(body, &m)
+	if err != nil {
+		return err
+	}
+	for k, v := range m {
+		switch k {
+		case "etag":
+			if v != nil {
+				var etag string
+				err = json.Unmarshal(*v, &etag)
+				if err != nil {
+					return err
+				}
+				fp.Etag = &etag
+			}
+		case "properties":
+			if v != nil {
+				var functionConfiguration FunctionConfiguration
+				err = json.Unmarshal(*v, &functionConfiguration)
+				if err != nil {
+					return err
+				}
+				fp.FunctionConfiguration = &functionConfiguration
+			}
+		case "type":
+			if v != nil {
+				var typeVar TypeBasicFunctionProperties
+				err = json.Unmarshal(*v, &typeVar)
+				if err != nil {
+					return err
+				}
+				fp.Type = typeVar
+			}
+		}
+	}
+
+	return nil
+}
+
+// BasicFunctionRetrieveDefaultDefinitionParameters parameters used to specify the type of function to retrieve the
+// default definition for.
+type BasicFunctionRetrieveDefaultDefinitionParameters interface {
+	AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, bool)
+	AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, bool)
+	AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool)
+	AsCSharpFunctionRetrieveDefaultDefinitionParameters() (*CSharpFunctionRetrieveDefaultDefinitionParameters, bool)
+	AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool)
+}
+
+// FunctionRetrieveDefaultDefinitionParameters parameters used to specify the type of function to retrieve the
+// default definition for.
+type FunctionRetrieveDefaultDefinitionParameters struct {
+	// BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftMachineLearningServices', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf', 'BindingTypeMicrosoftStreamAnalyticsCLRUdf'
+	BindingType BindingType `json:"bindingType,omitempty"`
+}
+
+// unmarshalBasicFunctionRetrieveDefaultDefinitionParameters selects the concrete
+// implementation from the "bindingType" discriminator field and unmarshals into it.
+func unmarshalBasicFunctionRetrieveDefaultDefinitionParameters(body []byte) (BasicFunctionRetrieveDefaultDefinitionParameters, error) {
+	var m map[string]interface{}
+	err := json.Unmarshal(body, &m)
+	if err != nil {
+		return nil, err
+	}
+
+	switch m["bindingType"] {
+	case string(BindingTypeMicrosoftMachineLearningWebService):
+		var amlsfrddp AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters
+		err := json.Unmarshal(body, &amlsfrddp)
+		return amlsfrddp, err
+	case string(BindingTypeMicrosoftMachineLearningServices):
+		var amlsfrddp AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters
+		err := json.Unmarshal(body, &amlsfrddp)
+		return amlsfrddp, err
+	case string(BindingTypeMicrosoftStreamAnalyticsJavascriptUdf):
+		var jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters
+		err := json.Unmarshal(body, &jsfrddp)
+		return jsfrddp, err
+	case string(BindingTypeMicrosoftStreamAnalyticsCLRUdf):
+		var csfrddp CSharpFunctionRetrieveDefaultDefinitionParameters
+		err := json.Unmarshal(body, &csfrddp)
+		return csfrddp, err
+	default:
+		var frddp FunctionRetrieveDefaultDefinitionParameters
+		err := json.Unmarshal(body, &frddp)
+		return frddp, err
+	}
+}
+// unmarshalBasicFunctionRetrieveDefaultDefinitionParametersArray applies
+// unmarshalBasicFunctionRetrieveDefaultDefinitionParameters to every element of a JSON array.
+func unmarshalBasicFunctionRetrieveDefaultDefinitionParametersArray(body []byte) ([]BasicFunctionRetrieveDefaultDefinitionParameters, error) {
+	var rawMessages []*json.RawMessage
+	err := json.Unmarshal(body, &rawMessages)
+	if err != nil {
+		return nil, err
+	}
+
+	frddpArray := make([]BasicFunctionRetrieveDefaultDefinitionParameters, len(rawMessages))
+
+	for index, rawMessage := range rawMessages {
+		frddp, err := unmarshalBasicFunctionRetrieveDefaultDefinitionParameters(*rawMessage)
+		if err != nil {
+			return nil, err
+		}
+		frddpArray[index] = frddp
+	}
+	return frddpArray, nil
+}
+
+// MarshalJSON is the custom marshaler for FunctionRetrieveDefaultDefinitionParameters.
+func (frddp FunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) {
+	frddp.BindingType = BindingTypeFunctionRetrieveDefaultDefinitionParameters
+	objectMap := make(map[string]interface{})
+	if frddp.BindingType != "" {
+		objectMap["bindingType"] = frddp.BindingType
+	}
+	return json.Marshal(objectMap)
+}
+
+// AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters.
+func (frddp FunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, bool) {
+	return nil, false
+}
+
+// AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters.
+func (frddp FunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, bool) {
+	return nil, false
+}
+
+// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters.
+func (frddp FunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) {
+	return nil, false
+}
+
+// AsCSharpFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters.
+func (frddp FunctionRetrieveDefaultDefinitionParameters) AsCSharpFunctionRetrieveDefaultDefinitionParameters() (*CSharpFunctionRetrieveDefaultDefinitionParameters, bool) {
+	return nil, false
+}
+
+// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters.
+func (frddp FunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) {
+	return &frddp, true
+}
+
+// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters.
+func (frddp FunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) {
+	return &frddp, true
+}
+
+// FunctionsTestFuture an abstraction for monitoring and retrieving the results of a long-running operation.
+type FunctionsTestFuture struct {
+	azure.Future
+}
+
+// Result returns the result of the asynchronous operation.
+// If the operation has not completed it will return an error.
+func (future *FunctionsTestFuture) Result(client FunctionsClient) (rts ResourceTestStatus, err error) {
+	var done bool
+	done, err = future.DoneWithContext(context.Background(), client)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsTestFuture", "Result", future.Response(), "Polling failure")
+		return
+	}
+	if !done {
+		err = azure.NewAsyncOpIncompleteError("streamanalytics.FunctionsTestFuture")
+		return
+	}
+	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
+	if rts.Response.Response, err = future.GetResult(sender); err == nil && rts.Response.Response.StatusCode != http.StatusNoContent {
+		rts, err = client.TestResponder(rts.Response.Response)
+		if err != nil {
+			err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsTestFuture", "Result", rts.Response.Response, "Failure responding to request")
+		}
+	}
+	return
+}
+
+// Identity describes how identity is verified
+type Identity struct {
+	TenantID    *string `json:"tenantId,omitempty"`
+	PrincipalID *string `json:"principalId,omitempty"`
+	Type        *string `json:"type,omitempty"`
+}
+
+// Input an input object, containing all information associated with the named input. All inputs are contained
+// under a streaming job.
+type Input struct {
+	autorest.Response `json:"-"`
+	// Properties - The properties that are associated with an input. Required on PUT (CreateOrReplace) requests.
+	Properties BasicInputProperties `json:"properties,omitempty"`
+	// ID - READ-ONLY; Resource Id
+	ID *string `json:"id,omitempty"`
+	// Name - Resource name
+	Name *string `json:"name,omitempty"`
+	// Type - READ-ONLY; Resource type
+	Type *string `json:"type,omitempty"`
+}
+
+// MarshalJSON is the custom marshaler for Input.
+func (i Input) MarshalJSON() ([]byte, error) {
+	objectMap := make(map[string]interface{})
+	objectMap["properties"] = i.Properties
+	if i.Name != nil {
+		objectMap["name"] = i.Name
+	}
+	return json.Marshal(objectMap)
+}
+
+// UnmarshalJSON is the custom unmarshaler for Input struct.
+func (i *Input) UnmarshalJSON(body []byte) error {
+	var m map[string]*json.RawMessage
+	err := json.Unmarshal(body, &m)
+	if err != nil {
+		return err
+	}
+	for k, v := range m {
+		switch k {
+		case "properties":
+			if v != nil {
+				// Properties is polymorphic; dispatch on its discriminator.
+				properties, err := unmarshalBasicInputProperties(*v)
+				if err != nil {
+					return err
+				}
+				i.Properties = properties
+			}
+		case "id":
+			if v != nil {
+				var ID string
+				err = json.Unmarshal(*v, &ID)
+				if err != nil {
+					return err
+				}
+				i.ID = &ID
+			}
+		case "name":
+			if v != nil {
+				var name string
+				err = json.Unmarshal(*v, &name)
+				if err != nil {
+					return err
+				}
+				i.Name = &name
+			}
+		case "type":
+			if v != nil {
+				var typeVar string
+				err = json.Unmarshal(*v, &typeVar)
+				if err != nil {
+					return err
+				}
+				i.Type = &typeVar
+			}
+		}
+	}
+
+	return nil
+}
+
+// InputListResult object containing a list of inputs under a streaming job.
+type InputListResult struct {
+	autorest.Response `json:"-"`
+	// Value - READ-ONLY; A list of inputs under a streaming job. Populated by a 'List' operation.
+	Value *[]Input `json:"value,omitempty"`
+	// NextLink - READ-ONLY; The link (url) to the next page of results.
+	NextLink *string `json:"nextLink,omitempty"`
+}
+
+// InputListResultIterator provides access to a complete listing of Input values.
+type InputListResultIterator struct {
+	i    int
+	page InputListResultPage
+}
+
+// NextWithContext advances to the next value. If there was an error making
+// the request the iterator does not advance and the error is returned.
+func (iter *InputListResultIterator) NextWithContext(ctx context.Context) (err error) {
+	if tracing.IsEnabled() {
+		ctx = tracing.StartSpan(ctx, fqdn+"/InputListResultIterator.NextWithContext")
+		defer func() {
+			sc := -1
+			if iter.Response().Response.Response != nil {
+				sc = iter.Response().Response.Response.StatusCode
+			}
+			tracing.EndSpan(ctx, sc, err)
+		}()
+	}
+	iter.i++
+	if iter.i < len(iter.page.Values()) {
+		return nil
+	}
+	err = iter.page.NextWithContext(ctx)
+	if err != nil {
+		iter.i--
+		return err
+	}
+	iter.i = 0
+	return nil
+}
+
+// Next advances to the next value. If there was an error making
+// the request the iterator does not advance and the error is returned.
+// Deprecated: Use NextWithContext() instead.
+func (iter *InputListResultIterator) Next() error {
+	return iter.NextWithContext(context.Background())
+}
+
+// NotDone returns true if the enumeration should be started or is not yet complete.
+func (iter InputListResultIterator) NotDone() bool {
+	return iter.page.NotDone() && iter.i < len(iter.page.Values())
+}
+
+// Response returns the raw server response from the last page request.
+func (iter InputListResultIterator) Response() InputListResult {
+	return iter.page.Response()
+}
+
+// Value returns the current value or a zero-initialized value if the
+// iterator has advanced beyond the end of the collection.
+func (iter InputListResultIterator) Value() Input {
+	if !iter.page.NotDone() {
+		return Input{}
+	}
+	return iter.page.Values()[iter.i]
+}
+
+// NewInputListResultIterator creates a new instance of the InputListResultIterator type.
+func NewInputListResultIterator(page InputListResultPage) InputListResultIterator {
+	return InputListResultIterator{page: page}
+}
+
+// IsEmpty returns true if the ListResult contains no values.
+func (ilr InputListResult) IsEmpty() bool {
+	return ilr.Value == nil || len(*ilr.Value) == 0
+}
+
+// hasNextLink returns true if the NextLink is not empty.
+func (ilr InputListResult) hasNextLink() bool {
+	return ilr.NextLink != nil && len(*ilr.NextLink) != 0
+}
+
+// inputListResultPreparer prepares a request to retrieve the next set of results.
+// It returns nil if no more results exist.
+func (ilr InputListResult) inputListResultPreparer(ctx context.Context) (*http.Request, error) {
+	if !ilr.hasNextLink() {
+		return nil, nil
+	}
+	return autorest.Prepare((&http.Request{}).WithContext(ctx),
+		autorest.AsJSON(),
+		autorest.AsGet(),
+		autorest.WithBaseURL(to.String(ilr.NextLink)))
+}
+
+// InputListResultPage contains a page of Input values.
+type InputListResultPage struct {
+	fn  func(context.Context, InputListResult) (InputListResult, error)
+	ilr InputListResult
+}
+
+// NextWithContext advances to the next page of values. If there was an error making
+// the request the page does not advance and the error is returned.
+func (page *InputListResultPage) NextWithContext(ctx context.Context) (err error) {
+	if tracing.IsEnabled() {
+		ctx = tracing.StartSpan(ctx, fqdn+"/InputListResultPage.NextWithContext")
+		defer func() {
+			sc := -1
+			if page.Response().Response.Response != nil {
+				sc = page.Response().Response.Response.StatusCode
+			}
+			tracing.EndSpan(ctx, sc, err)
+		}()
+	}
+	// Loop skips over any empty intermediate pages that still carry a nextLink,
+	// stopping once a non-empty page (or the final page) is reached.
+	for {
+		next, err := page.fn(ctx, page.ilr)
+		if err != nil {
+			return err
+		}
+		page.ilr = next
+		if !next.hasNextLink() || !next.IsEmpty() {
+			break
+		}
+	}
+	return nil
+}
+
+// Next advances to the next page of values. If there was an error making
+// the request the page does not advance and the error is returned.
+// Deprecated: Use NextWithContext() instead.
+func (page *InputListResultPage) Next() error {
+	return page.NextWithContext(context.Background())
+}
+
+// NotDone returns true if the page enumeration should be started or is not yet complete.
+func (page InputListResultPage) NotDone() bool {
+	return !page.ilr.IsEmpty()
+}
+
+// Response returns the raw server response from the last page request.
+func (page InputListResultPage) Response() InputListResult {
+	return page.ilr
+}
+
+// Values returns the slice of values for the current page or nil if there are no values.
+func (page InputListResultPage) Values() []Input {
+	if page.ilr.IsEmpty() {
+		return nil
+	}
+	return *page.ilr.Value
+}
+
+// NewInputListResultPage creates a new instance of the InputListResultPage type.
+func NewInputListResultPage(getNextPage func(context.Context, InputListResult) (InputListResult, error)) InputListResultPage {
+	return InputListResultPage{fn: getNextPage}
+}
+
+// BasicInputProperties the properties that are associated with an input.
+type BasicInputProperties interface {
+	AsStreamInputProperties() (*StreamInputProperties, bool)
+	AsReferenceInputProperties() (*ReferenceInputProperties, bool)
+	AsInputProperties() (*InputProperties, bool)
+}
+
+// InputProperties the properties that are associated with an input.
+type InputProperties struct {
+	// Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests.
+	Serialization BasicSerialization `json:"serialization,omitempty"`
+	// Diagnostics - READ-ONLY; Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention.
+	Diagnostics *Diagnostics `json:"diagnostics,omitempty"`
+	// Etag - READ-ONLY; The current entity tag for the input. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency.
+	Etag *string `json:"etag,omitempty"`
+	Compression *Compression `json:"compression,omitempty"`
+	// PartitionKey - partitionKey Describes a key in the input data which is used for partitioning the input data
+	PartitionKey *string `json:"partitionKey,omitempty"`
+	// Type - Possible values include: 'TypeInputProperties', 'TypeStream', 'TypeReference'
+	Type TypeBasicInputProperties `json:"type,omitempty"`
+}
+
+// unmarshalBasicInputProperties selects the concrete implementation of
+// BasicInputProperties from the "type" discriminator field and unmarshals into it.
+func unmarshalBasicInputProperties(body []byte) (BasicInputProperties, error) {
+	var m map[string]interface{}
+	err := json.Unmarshal(body, &m)
+	if err != nil {
+		return nil, err
+	}
+
+	switch m["type"] {
+	case string(TypeStream):
+		var sip StreamInputProperties
+		err := json.Unmarshal(body, &sip)
+		return sip, err
+	case string(TypeReference):
+		var rip ReferenceInputProperties
+		err := json.Unmarshal(body, &rip)
+		return rip, err
+	default:
+		var IP InputProperties
+		err := json.Unmarshal(body, &IP)
+		return IP, err
+	}
+}
+// unmarshalBasicInputPropertiesArray applies unmarshalBasicInputProperties
+// to every element of a JSON array.
+func unmarshalBasicInputPropertiesArray(body []byte) ([]BasicInputProperties, error) {
+	var rawMessages []*json.RawMessage
+	err := json.Unmarshal(body, &rawMessages)
+	if err != nil {
+		return nil, err
+	}
+
+	IPArray := make([]BasicInputProperties, len(rawMessages))
+
+	for index, rawMessage := range rawMessages {
+		IP, err := unmarshalBasicInputProperties(*rawMessage)
+		if err != nil {
+			return nil, err
+		}
+		IPArray[index] = IP
+	}
+	return IPArray, nil
+}
+
+// MarshalJSON is the custom marshaler for InputProperties.
+func (IP InputProperties) MarshalJSON() ([]byte, error) {
+	// Force the discriminator so the wire form always identifies this concrete type.
+	IP.Type = TypeInputProperties
+	objectMap := make(map[string]interface{})
+	objectMap["serialization"] = IP.Serialization
+	if IP.Compression != nil {
+		objectMap["compression"] = IP.Compression
+	}
+	if IP.PartitionKey != nil {
+		objectMap["partitionKey"] = IP.PartitionKey
+	}
+	if IP.Type != "" {
+		objectMap["type"] = IP.Type
+	}
+	return json.Marshal(objectMap)
+}
+
+// AsStreamInputProperties is the BasicInputProperties implementation for InputProperties.
+func (IP InputProperties) AsStreamInputProperties() (*StreamInputProperties, bool) {
+	return nil, false
+}
+
+// AsReferenceInputProperties is the BasicInputProperties implementation for InputProperties.
+func (IP InputProperties) AsReferenceInputProperties() (*ReferenceInputProperties, bool) {
+	return nil, false
+}
+
+// AsInputProperties is the BasicInputProperties implementation for InputProperties.
+func (IP InputProperties) AsInputProperties() (*InputProperties, bool) {
+	return &IP, true
+}
+
+// AsBasicInputProperties is the BasicInputProperties implementation for InputProperties.
+func (IP InputProperties) AsBasicInputProperties() (BasicInputProperties, bool) {
+	return &IP, true
+}
+
+// UnmarshalJSON is the custom unmarshaler for InputProperties struct.
+func (IP *InputProperties) UnmarshalJSON(body []byte) error {
+	var m map[string]*json.RawMessage
+	err := json.Unmarshal(body, &m)
+	if err != nil {
+		return err
+	}
+	for k, v := range m {
+		switch k {
+		case "serialization":
+			if v != nil {
+				// Serialization is polymorphic; dispatch on its discriminator.
+				serialization, err := unmarshalBasicSerialization(*v)
+				if err != nil {
+					return err
+				}
+				IP.Serialization = serialization
+			}
+		case "diagnostics":
+			if v != nil {
+				var diagnostics Diagnostics
+				err = json.Unmarshal(*v, &diagnostics)
+				if err != nil {
+					return err
+				}
+				IP.Diagnostics = &diagnostics
+			}
+		case "etag":
+			if v != nil {
+				var etag string
+				err = json.Unmarshal(*v, &etag)
+				if err != nil {
+					return err
+				}
+				IP.Etag = &etag
+			}
+		case "compression":
+			if v != nil {
+				var compression Compression
+				err = json.Unmarshal(*v, &compression)
+				if err != nil {
+					return err
+				}
+				IP.Compression = &compression
+			}
+		case "partitionKey":
+			if v != nil {
+				var partitionKey string
+				err = json.Unmarshal(*v, &partitionKey)
+				if err != nil {
+					return err
+				}
+				IP.PartitionKey = &partitionKey
+			}
+		case "type":
+			if v != nil {
+				var typeVar TypeBasicInputProperties
+				err = json.Unmarshal(*v, &typeVar)
+				if err != nil {
+					return err
+				}
+				IP.Type = typeVar
+			}
+		}
+	}
+
+	return nil
+}
+
+// InputsTestFuture an abstraction for monitoring and retrieving the results of a long-running operation.
+type InputsTestFuture struct {
+	azure.Future
+}
+
+// Result returns the result of the asynchronous operation.
+// If the operation has not completed it will return an error.
+func (future *InputsTestFuture) Result(client InputsClient) (rts ResourceTestStatus, err error) {
+	var done bool
+	done, err = future.DoneWithContext(context.Background(), client)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "streamanalytics.InputsTestFuture", "Result", future.Response(), "Polling failure")
+		return
+	}
+	if !done {
+		err = azure.NewAsyncOpIncompleteError("streamanalytics.InputsTestFuture")
+		return
+	}
+	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
+	if rts.Response.Response, err = future.GetResult(sender); err == nil && rts.Response.Response.StatusCode != http.StatusNoContent {
+		rts, err = client.TestResponder(rts.Response.Response)
+		if err != nil {
+			err = autorest.NewErrorWithError(err, "streamanalytics.InputsTestFuture", "Result", rts.Response.Response, "Failure responding to request")
+		}
+	}
+	return
+}
+
+// IoTHubStreamInputDataSource describes an IoT Hub input data source that contains stream data.
+type IoTHubStreamInputDataSource struct {
+	// IoTHubStreamInputDataSourceProperties - The properties that are associated with an IoT Hub input containing stream data. Required on PUT (CreateOrReplace) requests.
+	*IoTHubStreamInputDataSourceProperties `json:"properties,omitempty"`
+	// Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs'
+	Type TypeBasicStreamInputDataSource `json:"type,omitempty"`
+}
+
+// MarshalJSON is the custom marshaler for IoTHubStreamInputDataSource.
+func (ithsids IoTHubStreamInputDataSource) MarshalJSON() ([]byte, error) {
+	// Force the discriminator so the wire form always identifies this concrete type.
+	ithsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs
+	objectMap := make(map[string]interface{})
+	if ithsids.IoTHubStreamInputDataSourceProperties != nil {
+		objectMap["properties"] = ithsids.IoTHubStreamInputDataSourceProperties
+	}
+	if ithsids.Type != "" {
+		objectMap["type"] = ithsids.Type
+	}
+	return json.Marshal(objectMap)
+}
+
+// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource.
+func (ithsids IoTHubStreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) {
+	return nil, false
+}
+
+// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource.
+func (ithsids IoTHubStreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) {
+	return nil, false
+}
+
+// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource.
+func (ithsids IoTHubStreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) {
+	return nil, false
+}
+
+// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource.
+func (ithsids IoTHubStreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) {
+	return &ithsids, true
+}
+
+// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource.
+func (ithsids IoTHubStreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) {
+	return nil, false
+}
+
+// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource.
+func (ithsids IoTHubStreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) {
+	return &ithsids, true
+}
+
+// UnmarshalJSON is the custom unmarshaler for IoTHubStreamInputDataSource struct.
+func (ithsids *IoTHubStreamInputDataSource) UnmarshalJSON(body []byte) error {
+	var m map[string]*json.RawMessage
+	err := json.Unmarshal(body, &m)
+	if err != nil {
+		return err
+	}
+	for k, v := range m {
+		switch k {
+		case "properties":
+			if v != nil {
+				var ioTHubStreamInputDataSourceProperties IoTHubStreamInputDataSourceProperties
+				err = json.Unmarshal(*v, &ioTHubStreamInputDataSourceProperties)
+				if err != nil {
+					return err
+				}
+				ithsids.IoTHubStreamInputDataSourceProperties = &ioTHubStreamInputDataSourceProperties
+			}
+		case "type":
+			if v != nil {
+				var typeVar TypeBasicStreamInputDataSource
+				err = json.Unmarshal(*v, &typeVar)
+				if err != nil {
+					return err
+				}
+				ithsids.Type = typeVar
+			}
+		}
+	}
+
+	return nil
+}
+
+// IoTHubStreamInputDataSourceProperties the properties that are associated with a IoT Hub input containing
+// stream data.
+type IoTHubStreamInputDataSourceProperties struct {
+	// IotHubNamespace - The name or the URI of the IoT Hub. Required on PUT (CreateOrReplace) requests.
+	IotHubNamespace *string `json:"iotHubNamespace,omitempty"`
+	// SharedAccessPolicyName - The shared access policy name for the IoT Hub. This policy must contain at least the Service connect permission. Required on PUT (CreateOrReplace) requests.
+	SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"`
+	// SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests.
+	SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"`
+	// ConsumerGroupName - The name of an IoT Hub Consumer Group that should be used to read events from the IoT Hub. If not specified, the input uses the Iot Hub’s default consumer group.
+	ConsumerGroupName *string `json:"consumerGroupName,omitempty"`
+	// Endpoint - The IoT Hub endpoint to connect to (ie. messages/events, messages/operationsMonitoringEvents, etc.).
+	Endpoint *string `json:"endpoint,omitempty"`
+}
+
+// JavaScriptFunctionBinding the binding to a JavaScript function.
+type JavaScriptFunctionBinding struct {
+	// JavaScriptFunctionBindingProperties - The binding properties associated with a JavaScript function.
+	*JavaScriptFunctionBindingProperties `json:"properties,omitempty"`
+	// Type - Possible values include: 'TypeFunctionBinding', 'TypeMicrosoftMachineLearningWebService', 'TypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeMicrosoftStreamAnalyticsCLRUdf', 'TypeMicrosoftMachineLearningServices'
+	Type Type `json:"type,omitempty"`
+}
+
+// MarshalJSON is the custom marshaler for JavaScriptFunctionBinding.
+func (jsfb JavaScriptFunctionBinding) MarshalJSON() ([]byte, error) {
+	// Force the discriminator so the wire form always identifies this concrete type.
+	jsfb.Type = TypeMicrosoftStreamAnalyticsJavascriptUdf
+	objectMap := make(map[string]interface{})
+	if jsfb.JavaScriptFunctionBindingProperties != nil {
+		objectMap["properties"] = jsfb.JavaScriptFunctionBindingProperties
+	}
+	if jsfb.Type != "" {
+		objectMap["type"] = jsfb.Type
+	}
+	return json.Marshal(objectMap)
+}
+
+// AsAzureMachineLearningStudioFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding.
+func (jsfb JavaScriptFunctionBinding) AsAzureMachineLearningStudioFunctionBinding() (*AzureMachineLearningStudioFunctionBinding, bool) {
+	return nil, false
+}
+
+// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding.
+func (jsfb JavaScriptFunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) {
+	return &jsfb, true
+}
+
+// AsCSharpFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding.
+func (jsfb JavaScriptFunctionBinding) AsCSharpFunctionBinding() (*CSharpFunctionBinding, bool) {
+	return nil, false
+}
+
+// AsAzureMachineLearningServiceFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding.
+func (jsfb JavaScriptFunctionBinding) AsAzureMachineLearningServiceFunctionBinding() (*AzureMachineLearningServiceFunctionBinding, bool) {
+	return nil, false
+}
+
+// AsFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding.
+func (jsfb JavaScriptFunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) {
+	return nil, false
+}
+
+// AsBasicFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding.
+func (jsfb JavaScriptFunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) {
+	return &jsfb, true
+}
+
+// UnmarshalJSON is the custom unmarshaler for JavaScriptFunctionBinding struct.
+func (jsfb *JavaScriptFunctionBinding) UnmarshalJSON(body []byte) error {
+	var m map[string]*json.RawMessage
+	err := json.Unmarshal(body, &m)
+	if err != nil {
+		return err
+	}
+	for k, v := range m {
+		switch k {
+		case "properties":
+			if v != nil {
+				var javaScriptFunctionBindingProperties JavaScriptFunctionBindingProperties
+				err = json.Unmarshal(*v, &javaScriptFunctionBindingProperties)
+				if err != nil {
+					return err
+				}
+				jsfb.JavaScriptFunctionBindingProperties = &javaScriptFunctionBindingProperties
+			}
+		case "type":
+			if v != nil {
+				var typeVar Type
+				err = json.Unmarshal(*v, &typeVar)
+				if err != nil {
+					return err
+				}
+				jsfb.Type = typeVar
+			}
+		}
+	}
+
+	return nil
+}
+
+// JavaScriptFunctionBindingProperties the binding properties associated with a JavaScript function.
+type JavaScriptFunctionBindingProperties struct {
+	// Script - The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'
+	Script *string `json:"script,omitempty"`
+}
+
+// JavaScriptFunctionBindingRetrievalProperties the binding retrieval properties associated with a JavaScript
+// function.
+type JavaScriptFunctionBindingRetrievalProperties struct {
+	// Script - The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'.
+	Script *string `json:"script,omitempty"`
+	// UdfType - The function type. Possible values include: 'Scalar'
+	UdfType UdfType `json:"udfType,omitempty"`
+}
+
+// JavaScriptFunctionRetrieveDefaultDefinitionParameters the parameters needed to retrieve the default function
+// definition for a JavaScript function.
+type JavaScriptFunctionRetrieveDefaultDefinitionParameters struct {
+	// JavaScriptFunctionBindingRetrievalProperties - The binding retrieval properties associated with a JavaScript function.
+	*JavaScriptFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"`
+	// BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftMachineLearningServices', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf', 'BindingTypeMicrosoftStreamAnalyticsCLRUdf'
+	BindingType BindingType `json:"bindingType,omitempty"`
+}
+
+// MarshalJSON is the custom marshaler for JavaScriptFunctionRetrieveDefaultDefinitionParameters.
func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) {
	// Force the polymorphic discriminator to the JavaScript UDF value; the
	// value receiver keeps the caller's copy unmodified.
	jsfrddp.BindingType = BindingTypeMicrosoftStreamAnalyticsJavascriptUdf
	objectMap := make(map[string]interface{})
	if jsfrddp.JavaScriptFunctionBindingRetrievalProperties != nil {
		objectMap["bindingRetrievalProperties"] = jsfrddp.JavaScriptFunctionBindingRetrievalProperties
	}
	if jsfrddp.BindingType != "" {
		objectMap["bindingType"] = jsfrddp.BindingType
	}
	return json.Marshal(objectMap)
}

// AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters.
func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters.
func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters.
func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) {
	return &jsfrddp, true
}

// AsCSharpFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters.
func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsCSharpFunctionRetrieveDefaultDefinitionParameters() (*CSharpFunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters.
func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) {
	return nil, false
}

// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters.
func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) {
	return &jsfrddp, true
}

// UnmarshalJSON is the custom unmarshaler for JavaScriptFunctionRetrieveDefaultDefinitionParameters struct.
// Keys other than "bindingRetrievalProperties" and "bindingType" are ignored.
func (jsfrddp *JavaScriptFunctionRetrieveDefaultDefinitionParameters) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "bindingRetrievalProperties":
			if v != nil {
				var javaScriptFunctionBindingRetrievalProperties JavaScriptFunctionBindingRetrievalProperties
				err = json.Unmarshal(*v, &javaScriptFunctionBindingRetrievalProperties)
				if err != nil {
					return err
				}
				jsfrddp.JavaScriptFunctionBindingRetrievalProperties = &javaScriptFunctionBindingRetrievalProperties
			}
		case "bindingType":
			if v != nil {
				var bindingType BindingType
				err = json.Unmarshal(*v, &bindingType)
				if err != nil {
					return err
				}
				jsfrddp.BindingType = bindingType
			}
		}
	}

	return nil
}

// JobStorageAccount the properties that are associated with an Azure Storage account with MSI
type JobStorageAccount struct {
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
	// AccountName - The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests.
	AccountName *string `json:"accountName,omitempty"`
	// AccountKey - The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests.
	AccountKey *string `json:"accountKey,omitempty"`
}

// JSONSerialization describes how data from an input is serialized or how data is serialized when written to
// an output in JSON format.
type JSONSerialization struct {
	// JSONSerializationProperties - The properties that are associated with the JSON serialization type. Required on PUT (CreateOrReplace) requests.
	*JSONSerializationProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeSerialization', 'TypeParquet', 'TypeCustomClr', 'TypeCsv', 'TypeJSON', 'TypeAvro'
	Type TypeBasicSerialization `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for JSONSerialization.
func (js JSONSerialization) MarshalJSON() ([]byte, error) {
	// Force the serialization discriminator to JSON; value receiver, so the
	// caller's copy is unchanged.
	js.Type = TypeJSON
	objectMap := make(map[string]interface{})
	if js.JSONSerializationProperties != nil {
		objectMap["properties"] = js.JSONSerializationProperties
	}
	if js.Type != "" {
		objectMap["type"] = js.Type
	}
	return json.Marshal(objectMap)
}

// AsParquetSerialization is the BasicSerialization implementation for JSONSerialization.
func (js JSONSerialization) AsParquetSerialization() (*ParquetSerialization, bool) {
	return nil, false
}

// AsCustomClrSerialization is the BasicSerialization implementation for JSONSerialization.
func (js JSONSerialization) AsCustomClrSerialization() (*CustomClrSerialization, bool) {
	return nil, false
}

// AsCsvSerialization is the BasicSerialization implementation for JSONSerialization.
func (js JSONSerialization) AsCsvSerialization() (*CsvSerialization, bool) {
	return nil, false
}

// AsJSONSerialization is the BasicSerialization implementation for JSONSerialization.
func (js JSONSerialization) AsJSONSerialization() (*JSONSerialization, bool) {
	return &js, true
}

// AsAvroSerialization is the BasicSerialization implementation for JSONSerialization.
func (js JSONSerialization) AsAvroSerialization() (*AvroSerialization, bool) {
	return nil, false
}

// AsSerialization is the BasicSerialization implementation for JSONSerialization.
func (js JSONSerialization) AsSerialization() (*Serialization, bool) {
	return nil, false
}

// AsBasicSerialization is the BasicSerialization implementation for JSONSerialization.
func (js JSONSerialization) AsBasicSerialization() (BasicSerialization, bool) {
	return &js, true
}

// UnmarshalJSON is the custom unmarshaler for JSONSerialization struct.
// Unknown keys are ignored.
func (js *JSONSerialization) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				// Generated code reuses the type's name for this local; legal
				// because the variable's scope only starts after the declaration.
				var JSONSerializationProperties JSONSerializationProperties
				err = json.Unmarshal(*v, &JSONSerializationProperties)
				if err != nil {
					return err
				}
				js.JSONSerializationProperties = &JSONSerializationProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicSerialization
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				js.Type = typeVar
			}
		}
	}

	return nil
}

// JSONSerializationProperties the properties that are associated with the JSON serialization type.
type JSONSerializationProperties struct {
	// Encoding - Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. Possible values include: 'UTF8'
	Encoding Encoding `json:"encoding,omitempty"`
	// Format - This property only applies to JSON serialization of outputs only. It is not applicable to inputs. This property specifies the format of the JSON the output will be written in. The currently supported values are 'lineSeparated' indicating the output will be formatted by having each JSON object separated by a new line and 'array' indicating the output will be formatted as an array of JSON objects. Default value is 'lineSeparated' if left null. Possible values include: 'LineSeparated', 'Array'
	Format JSONOutputSerializationFormat `json:"format,omitempty"`
}

// OAuthBasedDataSourceProperties the properties that are associated with data sources that use OAuth as their
// authentication model.
type OAuthBasedDataSourceProperties struct {
	// RefreshToken - A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests.
	RefreshToken *string `json:"refreshToken,omitempty"`
	// TokenUserPrincipalName - The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token.
	TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"`
	// TokenUserDisplayName - The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token.
	TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"`
}

// Operation a Stream Analytics REST API operation
type Operation struct {
	// Name - READ-ONLY; The name of the operation being performed on this particular object.
	Name *string `json:"name,omitempty"`
	// Display - READ-ONLY; Contains the localized display information for this particular operation / action.
	Display *OperationDisplay `json:"display,omitempty"`
}

// OperationDisplay contains the localized display information for this particular operation / action.
type OperationDisplay struct {
	// Provider - READ-ONLY; The localized friendly form of the resource provider name.
	Provider *string `json:"provider,omitempty"`
	// Resource - READ-ONLY; The localized friendly form of the resource type related to this action/operation.
	Resource *string `json:"resource,omitempty"`
	// Operation - READ-ONLY; The localized friendly name for the operation.
	Operation *string `json:"operation,omitempty"`
	// Description - READ-ONLY; The localized friendly description for the operation.
	Description *string `json:"description,omitempty"`
}

// OperationListResult result of the request to list Stream Analytics operations. It contains a list of
// operations and a URL link to get the next set of results.
type OperationListResult struct {
	autorest.Response `json:"-"`
	// Value - READ-ONLY; List of Stream Analytics operations supported by the Microsoft.StreamAnalytics resource provider.
	Value *[]Operation `json:"value,omitempty"`
	// NextLink - READ-ONLY; URL to get the next set of operation list results if there are any.
	NextLink *string `json:"nextLink,omitempty"`
}

// OperationListResultIterator provides access to a complete listing of Operation values.
type OperationListResultIterator struct {
	i    int // index into the current page's values
	page OperationListResultPage
}

// NextWithContext advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
func (iter *OperationListResultIterator) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/OperationListResultIterator.NextWithContext")
		defer func() {
			sc := -1
			if iter.Response().Response.Response != nil {
				sc = iter.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Advance within the current page when possible; otherwise fetch the next
	// page. On a fetch failure the index is rolled back so the iterator does
	// not advance.
	iter.i++
	if iter.i < len(iter.page.Values()) {
		return nil
	}
	err = iter.page.NextWithContext(ctx)
	if err != nil {
		iter.i--
		return err
	}
	iter.i = 0
	return nil
}

// Next advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (iter *OperationListResultIterator) Next() error {
	return iter.NextWithContext(context.Background())
}

// NotDone returns true if the enumeration should be started or is not yet complete.
func (iter OperationListResultIterator) NotDone() bool {
	return iter.page.NotDone() && iter.i < len(iter.page.Values())
}

// Response returns the raw server response from the last page request.
func (iter OperationListResultIterator) Response() OperationListResult {
	return iter.page.Response()
}

// Value returns the current value or a zero-initialized value if the
// iterator has advanced beyond the end of the collection.
func (iter OperationListResultIterator) Value() Operation {
	if !iter.page.NotDone() {
		return Operation{}
	}
	return iter.page.Values()[iter.i]
}

// Creates a new instance of the OperationListResultIterator type.
func NewOperationListResultIterator(page OperationListResultPage) OperationListResultIterator {
	return OperationListResultIterator{page: page}
}

// IsEmpty returns true if the ListResult contains no values.
func (olr OperationListResult) IsEmpty() bool {
	return olr.Value == nil || len(*olr.Value) == 0
}

// hasNextLink returns true if the NextLink is not empty.
func (olr OperationListResult) hasNextLink() bool {
	return olr.NextLink != nil && len(*olr.NextLink) != 0
}

// operationListResultPreparer prepares a request to retrieve the next set of results.
// It returns nil if no more results exist.
func (olr OperationListResult) operationListResultPreparer(ctx context.Context) (*http.Request, error) {
	if !olr.hasNextLink() {
		// (nil, nil) signals "no further pages" to the caller.
		return nil, nil
	}
	return autorest.Prepare((&http.Request{}).WithContext(ctx),
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(to.String(olr.NextLink)))
}

// OperationListResultPage contains a page of Operation values.
type OperationListResultPage struct {
	fn  func(context.Context, OperationListResult) (OperationListResult, error) // fetches the page that follows the given one
	olr OperationListResult                                                     // the current page
}

// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *OperationListResultPage) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/OperationListResultPage.NextWithContext")
		defer func() {
			sc := -1
			if page.Response().Response.Response != nil {
				sc = page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Skip over empty intermediate pages: keep fetching until a page has
	// values or there is no next link left to follow.
	for {
		next, err := page.fn(ctx, page.olr)
		if err != nil {
			return err
		}
		page.olr = next
		if !next.hasNextLink() || !next.IsEmpty() {
			break
		}
	}
	return nil
}

// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *OperationListResultPage) Next() error {
	return page.NextWithContext(context.Background())
}

// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page OperationListResultPage) NotDone() bool {
	return !page.olr.IsEmpty()
}

// Response returns the raw server response from the last page request.
func (page OperationListResultPage) Response() OperationListResult {
	return page.olr
}

// Values returns the slice of values for the current page or nil if there are no values.
func (page OperationListResultPage) Values() []Operation {
	if page.olr.IsEmpty() {
		return nil
	}
	return *page.olr.Value
}

// Creates a new instance of the OperationListResultPage type.
func NewOperationListResultPage(getNextPage func(context.Context, OperationListResult) (OperationListResult, error)) OperationListResultPage {
	return OperationListResultPage{fn: getNextPage}
}

// Output an output object, containing all information associated with the named output. All outputs are
// contained under a streaming job.
type Output struct {
	autorest.Response `json:"-"`
	// OutputProperties - The properties that are associated with an output. Required on PUT (CreateOrReplace) requests.
	*OutputProperties `json:"properties,omitempty"`
	// ID - READ-ONLY; Resource Id
	ID *string `json:"id,omitempty"`
	// Name - Resource name
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; Resource type
	Type *string `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for Output.
// The READ-ONLY fields (ID, Type) are deliberately omitted from request payloads.
func (o Output) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if o.OutputProperties != nil {
		objectMap["properties"] = o.OutputProperties
	}
	if o.Name != nil {
		objectMap["name"] = o.Name
	}
	return json.Marshal(objectMap)
}

// UnmarshalJSON is the custom unmarshaler for Output struct.
// Unknown keys are ignored; READ-ONLY fields (id, type) are populated here
// even though they are never written back by MarshalJSON.
func (o *Output) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var outputProperties OutputProperties
				err = json.Unmarshal(*v, &outputProperties)
				if err != nil {
					return err
				}
				o.OutputProperties = &outputProperties
			}
		case "id":
			if v != nil {
				var ID string
				err = json.Unmarshal(*v, &ID)
				if err != nil {
					return err
				}
				o.ID = &ID
			}
		case "name":
			if v != nil {
				var name string
				err = json.Unmarshal(*v, &name)
				if err != nil {
					return err
				}
				o.Name = &name
			}
		case "type":
			if v != nil {
				var typeVar string
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				o.Type = &typeVar
			}
		}
	}

	return nil
}

// BasicOutputDataSource describes the data source that output will be written to.
type BasicOutputDataSource interface {
	AsBlobOutputDataSource() (*BlobOutputDataSource, bool)
	AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool)
	AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool)
	AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool)
	AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool)
	AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool)
	AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool)
	AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool)
	AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool)
	AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool)
	AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool)
	AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool)
	AsOutputDataSource() (*OutputDataSource, bool)
}

// OutputDataSource describes the data source that output will be written to.
type OutputDataSource struct {
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// unmarshalBasicOutputDataSource decodes body into the concrete
// BasicOutputDataSource implementation selected by the "type" discriminator;
// an unrecognized or missing discriminator falls back to the base
// OutputDataSource.
func unmarshalBasicOutputDataSource(body []byte) (BasicOutputDataSource, error) {
	var m map[string]interface{}
	err := json.Unmarshal(body, &m)
	if err != nil {
		return nil, err
	}

	switch m["type"] {
	case string(TypeMicrosoftStorageBlob):
		var bods BlobOutputDataSource
		err := json.Unmarshal(body, &bods)
		return bods, err
	case string(TypeMicrosoftStorageTable):
		var atods AzureTableOutputDataSource
		err := json.Unmarshal(body, &atods)
		return atods, err
	case string(TypeMicrosoftServiceBusEventHub):
		var ehods EventHubOutputDataSource
		err := json.Unmarshal(body, &ehods)
		return ehods, err
	case string(TypeMicrosoftEventHubEventHub):
		var ehvods EventHubV2OutputDataSource
		err := json.Unmarshal(body, &ehvods)
		return ehvods, err
	case string(TypeMicrosoftSQLServerDatabase):
		var asdods AzureSQLDatabaseOutputDataSource
		err := json.Unmarshal(body, &asdods)
		return asdods, err
	case string(TypeMicrosoftSQLServerDataWarehouse):
		var asods AzureSynapseOutputDataSource
		err := json.Unmarshal(body, &asods)
		return asods, err
	case string(TypeMicrosoftStorageDocumentDB):
		var ddods DocumentDbOutputDataSource
		err := json.Unmarshal(body, &ddods)
		return ddods, err
	case string(TypeMicrosoftAzureFunction):
		var afods AzureFunctionOutputDataSource
		err := json.Unmarshal(body, &afods)
		return afods, err
	case string(TypeMicrosoftServiceBusQueue):
		var sbqods ServiceBusQueueOutputDataSource
		err := json.Unmarshal(body, &sbqods)
		return sbqods, err
	case string(TypeMicrosoftServiceBusTopic):
		var sbtods ServiceBusTopicOutputDataSource
		err := json.Unmarshal(body, &sbtods)
		return sbtods, err
	case string(TypePowerBI):
		var pbods PowerBIOutputDataSource
		err := json.Unmarshal(body, &pbods)
		return pbods, err
	case string(TypeMicrosoftDataLakeAccounts):
		var adlsods AzureDataLakeStoreOutputDataSource
		err := json.Unmarshal(body, &adlsods)
		return adlsods, err
	default:
		var ods OutputDataSource
		err := json.Unmarshal(body, &ods)
		return ods, err
	}
}

// unmarshalBasicOutputDataSourceArray decodes a JSON array, dispatching each
// element through unmarshalBasicOutputDataSource.
func unmarshalBasicOutputDataSourceArray(body []byte) ([]BasicOutputDataSource, error) {
	var rawMessages []*json.RawMessage
	err := json.Unmarshal(body, &rawMessages)
	if err != nil {
		return nil, err
	}

	odsArray := make([]BasicOutputDataSource, len(rawMessages))

	for index, rawMessage := range rawMessages {
		ods, err := unmarshalBasicOutputDataSource(*rawMessage)
		if err != nil {
			return nil, err
		}
		odsArray[index] = ods
	}
	return odsArray, nil
}

// MarshalJSON is the custom marshaler for OutputDataSource.
func (ods OutputDataSource) MarshalJSON() ([]byte, error) {
	// Force the discriminator to the base value; value receiver, so the
	// caller's copy is unchanged.
	ods.Type = TypeOutputDataSource
	objectMap := make(map[string]interface{})
	if ods.Type != "" {
		objectMap["type"] = ods.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
	return nil, false
}

// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
	return nil, false
}

// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
	return nil, false
}

// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
	return nil, false
}

// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
	return nil, false
}

// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
	return nil, false
}

// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
	return nil, false
}

// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
	return nil, false
}

// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
	return nil, false
}

// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
	return nil, false
}

// AsOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
	return &ods, true
}

// AsBasicOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
func (ods OutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
	return &ods, true
}

// OutputListResult object containing a list of outputs under a streaming job.
type OutputListResult struct {
	autorest.Response `json:"-"`
	// Value - READ-ONLY; A list of outputs under a streaming job. Populated by a 'List' operation.
	Value *[]Output `json:"value,omitempty"`
	// NextLink - READ-ONLY; The link (url) to the next page of results.
	NextLink *string `json:"nextLink,omitempty"`
}

// OutputListResultIterator provides access to a complete listing of Output values.
type OutputListResultIterator struct {
	i    int // index into the current page's values
	page OutputListResultPage
}

// NextWithContext advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
func (iter *OutputListResultIterator) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/OutputListResultIterator.NextWithContext")
		defer func() {
			sc := -1
			if iter.Response().Response.Response != nil {
				sc = iter.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Advance within the current page when possible; otherwise fetch the next
	// page. On a fetch failure the index is rolled back so the iterator does
	// not advance.
	iter.i++
	if iter.i < len(iter.page.Values()) {
		return nil
	}
	err = iter.page.NextWithContext(ctx)
	if err != nil {
		iter.i--
		return err
	}
	iter.i = 0
	return nil
}

// Next advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (iter *OutputListResultIterator) Next() error {
	return iter.NextWithContext(context.Background())
}

// NotDone returns true if the enumeration should be started or is not yet complete.
func (iter OutputListResultIterator) NotDone() bool {
	return iter.page.NotDone() && iter.i < len(iter.page.Values())
}

// Response returns the raw server response from the last page request.
func (iter OutputListResultIterator) Response() OutputListResult {
	return iter.page.Response()
}

// Value returns the current value or a zero-initialized value if the
// iterator has advanced beyond the end of the collection.
func (iter OutputListResultIterator) Value() Output {
	if !iter.page.NotDone() {
		return Output{}
	}
	return iter.page.Values()[iter.i]
}

// Creates a new instance of the OutputListResultIterator type.
func NewOutputListResultIterator(page OutputListResultPage) OutputListResultIterator {
	return OutputListResultIterator{page: page}
}

// IsEmpty returns true if the ListResult contains no values.
func (olr OutputListResult) IsEmpty() bool {
	return olr.Value == nil || len(*olr.Value) == 0
}

// hasNextLink returns true if the NextLink is not empty.
func (olr OutputListResult) hasNextLink() bool {
	return olr.NextLink != nil && len(*olr.NextLink) != 0
}

// outputListResultPreparer prepares a request to retrieve the next set of results.
// It returns nil if no more results exist.
func (olr OutputListResult) outputListResultPreparer(ctx context.Context) (*http.Request, error) {
	if !olr.hasNextLink() {
		// (nil, nil) signals "no further pages" to the caller.
		return nil, nil
	}
	return autorest.Prepare((&http.Request{}).WithContext(ctx),
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(to.String(olr.NextLink)))
}

// OutputListResultPage contains a page of Output values.
type OutputListResultPage struct {
	fn  func(context.Context, OutputListResult) (OutputListResult, error) // fetches the page that follows the given one
	olr OutputListResult                                                  // the current page
}

// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *OutputListResultPage) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/OutputListResultPage.NextWithContext")
		defer func() {
			sc := -1
			if page.Response().Response.Response != nil {
				sc = page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Skip over empty intermediate pages: keep fetching until a page has
	// values or there is no next link left to follow.
	for {
		next, err := page.fn(ctx, page.olr)
		if err != nil {
			return err
		}
		page.olr = next
		if !next.hasNextLink() || !next.IsEmpty() {
			break
		}
	}
	return nil
}

// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *OutputListResultPage) Next() error {
	return page.NextWithContext(context.Background())
}

// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page OutputListResultPage) NotDone() bool {
	return !page.olr.IsEmpty()
}

// Response returns the raw server response from the last page request.
func (page OutputListResultPage) Response() OutputListResult {
	return page.olr
}

// Values returns the slice of values for the current page or nil if there are no values.
func (page OutputListResultPage) Values() []Output {
	if page.olr.IsEmpty() {
		return nil
	}
	return *page.olr.Value
}

// Creates a new instance of the OutputListResultPage type.
func NewOutputListResultPage(getNextPage func(context.Context, OutputListResult) (OutputListResult, error)) OutputListResultPage {
	return OutputListResultPage{fn: getNextPage}
}

// OutputProperties the properties that are associated with an output.
type OutputProperties struct {
	// Datasource - Describes the data source that output will be written to. Required on PUT (CreateOrReplace) requests.
	Datasource BasicOutputDataSource `json:"datasource,omitempty"`
	// TimeWindow - presumably an output batching time window; not documented in the generated spec — TODO confirm against service docs.
	TimeWindow *string `json:"timeWindow,omitempty"`
	// SizeWindow - presumably an output batching size window; not documented in the generated spec — TODO confirm against service docs.
	SizeWindow *float64 `json:"sizeWindow,omitempty"`
	// Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests.
	Serialization BasicSerialization `json:"serialization,omitempty"`
	// Diagnostics - READ-ONLY; Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention.
	Diagnostics *Diagnostics `json:"diagnostics,omitempty"`
	// Etag - READ-ONLY; The current entity tag for the output. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency.
	Etag *string `json:"etag,omitempty"`
}

// MarshalJSON is the custom marshaler for OutputProperties.
func (op OutputProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// Datasource and Serialization are polymorphic interfaces; they are always
	// emitted. READ-ONLY fields (Diagnostics, Etag) are deliberately omitted.
	objectMap["datasource"] = op.Datasource
	if op.TimeWindow != nil {
		objectMap["timeWindow"] = op.TimeWindow
	}
	if op.SizeWindow != nil {
		objectMap["sizeWindow"] = op.SizeWindow
	}
	objectMap["serialization"] = op.Serialization
	return json.Marshal(objectMap)
}

// UnmarshalJSON is the custom unmarshaler for OutputProperties struct.
// It is needed because Datasource and Serialization are polymorphic and must be
// dispatched on their embedded "type" discriminator.
func (op *OutputProperties) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "datasource":
			if v != nil {
				datasource, err := unmarshalBasicOutputDataSource(*v)
				if err != nil {
					return err
				}
				op.Datasource = datasource
			}
		case "timeWindow":
			if v != nil {
				var timeWindow string
				err = json.Unmarshal(*v, &timeWindow)
				if err != nil {
					return err
				}
				op.TimeWindow = &timeWindow
			}
		case "sizeWindow":
			if v != nil {
				var sizeWindow float64
				err = json.Unmarshal(*v, &sizeWindow)
				if err != nil {
					return err
				}
				op.SizeWindow = &sizeWindow
			}
		case "serialization":
			if v != nil {
				serialization, err := unmarshalBasicSerialization(*v)
				if err != nil {
					return err
				}
				op.Serialization = serialization
			}
		case "diagnostics":
			if v != nil {
				var diagnostics Diagnostics
				err = json.Unmarshal(*v, &diagnostics)
				if err != nil {
					return err
				}
				op.Diagnostics = &diagnostics
			}
		case "etag":
			if v != nil {
				var etag string
				err = json.Unmarshal(*v, &etag)
				if err != nil {
					return err
				}
				op.Etag = &etag
			}
		}
	}

	return nil
}

// OutputsTestFuture an abstraction for monitoring and retrieving the results of a long-running operation.
type OutputsTestFuture struct {
	azure.Future
}

// Result returns the result of the asynchronous operation.
// If the operation has not completed it will return an error.
func (future *OutputsTestFuture) Result(client OutputsClient) (rts ResourceTestStatus, err error) {
	var done bool
	// Poll (blocking) until the long-running operation reaches a terminal state.
	done, err = future.DoneWithContext(context.Background(), client)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.OutputsTestFuture", "Result", future.Response(), "Polling failure")
		return
	}
	if !done {
		err = azure.NewAsyncOpIncompleteError("streamanalytics.OutputsTestFuture")
		return
	}
	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
	// A 204 No Content final response carries no body, so the responder is skipped.
	if rts.Response.Response, err = future.GetResult(sender); err == nil && rts.Response.Response.StatusCode != http.StatusNoContent {
		rts, err = client.TestResponder(rts.Response.Response)
		if err != nil {
			err = autorest.NewErrorWithError(err, "streamanalytics.OutputsTestFuture", "Result", rts.Response.Response, "Failure responding to request")
		}
	}
	return
}

// ParquetSerialization describes how data from an input is serialized or how data is serialized when written
// to an output in Parquet format.
type ParquetSerialization struct {
	// Properties - The properties that are associated with the Parquet serialization type. Required on PUT (CreateOrReplace) requests.
	Properties interface{} `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeSerialization', 'TypeParquet', 'TypeCustomClr', 'TypeCsv', 'TypeJSON', 'TypeAvro'
	Type TypeBasicSerialization `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for ParquetSerialization.
func (ps ParquetSerialization) MarshalJSON() ([]byte, error) {
	// Force the discriminator so the wire form always identifies as Parquet.
	ps.Type = TypeParquet
	objectMap := make(map[string]interface{})
	if ps.Properties != nil {
		objectMap["properties"] = ps.Properties
	}
	if ps.Type != "" {
		objectMap["type"] = ps.Type
	}
	return json.Marshal(objectMap)
}

// AsParquetSerialization is the BasicSerialization implementation for ParquetSerialization.
func (ps ParquetSerialization) AsParquetSerialization() (*ParquetSerialization, bool) {
	return &ps, true
}

// AsCustomClrSerialization is the BasicSerialization implementation for ParquetSerialization.
func (ps ParquetSerialization) AsCustomClrSerialization() (*CustomClrSerialization, bool) {
	return nil, false
}

// AsCsvSerialization is the BasicSerialization implementation for ParquetSerialization.
func (ps ParquetSerialization) AsCsvSerialization() (*CsvSerialization, bool) {
	return nil, false
}

// AsJSONSerialization is the BasicSerialization implementation for ParquetSerialization.
func (ps ParquetSerialization) AsJSONSerialization() (*JSONSerialization, bool) {
	return nil, false
}

// AsAvroSerialization is the BasicSerialization implementation for ParquetSerialization.
func (ps ParquetSerialization) AsAvroSerialization() (*AvroSerialization, bool) {
	return nil, false
}

// AsSerialization is the BasicSerialization implementation for ParquetSerialization.
func (ps ParquetSerialization) AsSerialization() (*Serialization, bool) {
	return nil, false
}

// AsBasicSerialization is the BasicSerialization implementation for ParquetSerialization.
func (ps ParquetSerialization) AsBasicSerialization() (BasicSerialization, bool) {
	return &ps, true
}

// PowerBIOutputDataSource describes a Power BI output data source.
type PowerBIOutputDataSource struct {
	// PowerBIOutputDataSourceProperties - The properties that are associated with a Power BI output. Required on PUT (CreateOrReplace) requests.
	*PowerBIOutputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) MarshalJSON() ([]byte, error) {
	// Force the discriminator so the wire form always identifies as PowerBI.
	pbods.Type = TypePowerBI
	objectMap := make(map[string]interface{})
	if pbods.PowerBIOutputDataSourceProperties != nil {
		objectMap["properties"] = pbods.PowerBIOutputDataSourceProperties
	}
	if pbods.Type != "" {
		objectMap["type"] = pbods.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
	return nil, false
}

// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
	return nil, false
}

// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
	return nil, false
}

// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
	return nil, false
}

// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
	return nil, false
}

// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
	return nil, false
}

// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
	return nil, false
}

// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
	return nil, false
}

// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
	return &pbods, true
}

// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
	return nil, false
}

// AsOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
	return nil, false
}

// AsBasicOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource.
func (pbods PowerBIOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
	return &pbods, true
}

// UnmarshalJSON is the custom unmarshaler for PowerBIOutputDataSource struct.
// It is needed to populate the flattened embedded properties struct.
func (pbods *PowerBIOutputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var powerBIOutputDataSourceProperties PowerBIOutputDataSourceProperties
				err = json.Unmarshal(*v, &powerBIOutputDataSourceProperties)
				if err != nil {
					return err
				}
				pbods.PowerBIOutputDataSourceProperties = &powerBIOutputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicOutputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				pbods.Type = typeVar
			}
		}
	}

	return nil
}

// PowerBIOutputDataSourceProperties the properties that are associated with a Power BI output.
type PowerBIOutputDataSourceProperties struct {
	// Dataset - The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests.
	Dataset *string `json:"dataset,omitempty"`
	// Table - The name of the Power BI table under the specified dataset. Required on PUT (CreateOrReplace) requests.
	Table *string `json:"table,omitempty"`
	// GroupID - The ID of the Power BI group.
	GroupID *string `json:"groupId,omitempty"`
	// GroupName - The name of the Power BI group.
	// Use this property to help remember which specific Power BI group id was used.
	GroupName *string `json:"groupName,omitempty"`
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
	// RefreshToken - A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests.
	RefreshToken *string `json:"refreshToken,omitempty"`
	// TokenUserPrincipalName - The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token.
	TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"`
	// TokenUserDisplayName - The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token.
	TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"`
}

// PrivateEndpoint complete information about the private endpoint.
type PrivateEndpoint struct {
	autorest.Response `json:"-"`
	// Properties - The properties associated with a private endpoint.
	Properties *PrivateEndpointProperties `json:"properties,omitempty"`
	// Etag - READ-ONLY; Unique opaque string (generally a GUID) that represents the metadata state of the resource (private endpoint) and changes whenever the resource is updated. Required on PUT (CreateOrUpdate) requests.
	Etag *string `json:"etag,omitempty"`
	// ID - READ-ONLY; Fully qualified resource Id for the resource.
	// Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
	ID *string `json:"id,omitempty"`
	// Name - READ-ONLY; The name of the resource
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
	Type *string `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for PrivateEndpoint.
// Only the writable Properties field is emitted; READ-ONLY fields are omitted.
func (peVar PrivateEndpoint) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if peVar.Properties != nil {
		objectMap["properties"] = peVar.Properties
	}
	return json.Marshal(objectMap)
}

// PrivateEndpointListResult a list of private endpoints.
type PrivateEndpointListResult struct {
	autorest.Response `json:"-"`
	// Value - READ-ONLY; A list of private endpoints.
	Value *[]PrivateEndpoint `json:"value,omitempty"`
	// NextLink - READ-ONLY; The URL to fetch the next set of private endpoints.
	NextLink *string `json:"nextLink,omitempty"`
}

// PrivateEndpointListResultIterator provides access to a complete listing of PrivateEndpoint values.
type PrivateEndpointListResultIterator struct {
	// i indexes into the current page's values.
	i    int
	page PrivateEndpointListResultPage
}

// NextWithContext advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
func (iter *PrivateEndpointListResultIterator) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointListResultIterator.NextWithContext")
		defer func() {
			sc := -1
			if iter.Response().Response.Response != nil {
				sc = iter.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Advance within the current page first; fetch the next page only when
	// the current page is exhausted.
	iter.i++
	if iter.i < len(iter.page.Values()) {
		return nil
	}
	err = iter.page.NextWithContext(ctx)
	if err != nil {
		// Roll back the index so the iterator does not advance on failure.
		iter.i--
		return err
	}
	iter.i = 0
	return nil
}

// Next advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (iter *PrivateEndpointListResultIterator) Next() error {
	return iter.NextWithContext(context.Background())
}

// NotDone returns true if the enumeration should be started or is not yet complete.
func (iter PrivateEndpointListResultIterator) NotDone() bool {
	return iter.page.NotDone() && iter.i < len(iter.page.Values())
}

// Response returns the raw server response from the last page request.
func (iter PrivateEndpointListResultIterator) Response() PrivateEndpointListResult {
	return iter.page.Response()
}

// Value returns the current value or a zero-initialized value if the
// iterator has advanced beyond the end of the collection.
func (iter PrivateEndpointListResultIterator) Value() PrivateEndpoint {
	if !iter.page.NotDone() {
		return PrivateEndpoint{}
	}
	return iter.page.Values()[iter.i]
}

// Creates a new instance of the PrivateEndpointListResultIterator type.
func NewPrivateEndpointListResultIterator(page PrivateEndpointListResultPage) PrivateEndpointListResultIterator {
	return PrivateEndpointListResultIterator{page: page}
}

// IsEmpty returns true if the ListResult contains no values.
func (pelr PrivateEndpointListResult) IsEmpty() bool {
	return pelr.Value == nil || len(*pelr.Value) == 0
}

// hasNextLink returns true if the NextLink is not empty.
func (pelr PrivateEndpointListResult) hasNextLink() bool {
	return pelr.NextLink != nil && len(*pelr.NextLink) != 0
}

// privateEndpointListResultPreparer prepares a request to retrieve the next set of results.
// It returns nil if no more results exist.
func (pelr PrivateEndpointListResult) privateEndpointListResultPreparer(ctx context.Context) (*http.Request, error) {
	if !pelr.hasNextLink() {
		return nil, nil
	}
	return autorest.Prepare((&http.Request{}).WithContext(ctx),
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(to.String(pelr.NextLink)))
}

// PrivateEndpointListResultPage contains a page of PrivateEndpoint values.
type PrivateEndpointListResultPage struct {
	// fn fetches the next page given the current one.
	fn   func(context.Context, PrivateEndpointListResult) (PrivateEndpointListResult, error)
	// pelr is the most recently fetched page.
	pelr PrivateEndpointListResult
}

// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *PrivateEndpointListResultPage) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointListResultPage.NextWithContext")
		defer func() {
			sc := -1
			if page.Response().Response.Response != nil {
				sc = page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	for {
		next, err := page.fn(ctx, page.pelr)
		if err != nil {
			return err
		}
		page.pelr = next
		// Stop once a page has values or no further pages exist; empty
		// intermediate pages are skipped transparently.
		if !next.hasNextLink() || !next.IsEmpty() {
			break
		}
	}
	return nil
}

// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *PrivateEndpointListResultPage) Next() error {
	return page.NextWithContext(context.Background())
}

// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page PrivateEndpointListResultPage) NotDone() bool {
	return !page.pelr.IsEmpty()
}

// Response returns the raw server response from the last page request.
func (page PrivateEndpointListResultPage) Response() PrivateEndpointListResult {
	return page.pelr
}

// Values returns the slice of values for the current page or nil if there are no values.
func (page PrivateEndpointListResultPage) Values() []PrivateEndpoint {
	if page.pelr.IsEmpty() {
		return nil
	}
	return *page.pelr.Value
}

// Creates a new instance of the PrivateEndpointListResultPage type.
func NewPrivateEndpointListResultPage(getNextPage func(context.Context, PrivateEndpointListResult) (PrivateEndpointListResult, error)) PrivateEndpointListResultPage {
	return PrivateEndpointListResultPage{fn: getNextPage}
}

// PrivateEndpointProperties the properties associated with a private endpoint.
type PrivateEndpointProperties struct {
	// CreatedDate - READ-ONLY; The date when this private endpoint was created.
	CreatedDate *string `json:"createdDate,omitempty"`
	// ManualPrivateLinkServiceConnections - A list of connections to the remote resource. Immutable after it is set.
	ManualPrivateLinkServiceConnections *[]PrivateLinkServiceConnection `json:"manualPrivateLinkServiceConnections,omitempty"`
}

// MarshalJSON is the custom marshaler for PrivateEndpointProperties.
func (pep PrivateEndpointProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// READ-ONLY CreatedDate is deliberately omitted from the request body.
	if pep.ManualPrivateLinkServiceConnections != nil {
		objectMap["manualPrivateLinkServiceConnections"] = pep.ManualPrivateLinkServiceConnections
	}
	return json.Marshal(objectMap)
}

// PrivateEndpointsDeleteFuture an abstraction for monitoring and retrieving the results of a long-running
// operation.
type PrivateEndpointsDeleteFuture struct {
	azure.Future
}

// Result returns the result of the asynchronous operation.
// If the operation has not completed it will return an error.
func (future *PrivateEndpointsDeleteFuture) Result(client PrivateEndpointsClient) (ar autorest.Response, err error) {
	var done bool
	// Poll (blocking) until the long-running delete reaches a terminal state.
	done, err = future.DoneWithContext(context.Background(), client)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsDeleteFuture", "Result", future.Response(), "Polling failure")
		return
	}
	if !done {
		err = azure.NewAsyncOpIncompleteError("streamanalytics.PrivateEndpointsDeleteFuture")
		return
	}
	// Delete has no body; only the raw response is surfaced.
	ar.Response = future.Response()
	return
}

// PrivateLinkConnectionState a collection of read-only information about the state of the connection to the
// private remote resource.
type PrivateLinkConnectionState struct {
	// Status - READ-ONLY; Indicates whether the connection has been Approved/Rejected/Removed by the owner of the remote resource/service.
	Status *string `json:"status,omitempty"`
	// Description - READ-ONLY; The reason for approval/rejection of the connection.
	Description *string `json:"description,omitempty"`
	// ActionsRequired - READ-ONLY; A message indicating if changes on the service provider require any updates on the consumer.
	ActionsRequired *string `json:"actionsRequired,omitempty"`
}

// PrivateLinkServiceConnection a grouping of information about the connection to the remote resource.
type PrivateLinkServiceConnection struct {
	// PrivateLinkServiceConnectionProperties - Bag of properties defining a privatelinkServiceConnection.
	*PrivateLinkServiceConnectionProperties `json:"properties,omitempty"`
}

// MarshalJSON is the custom marshaler for PrivateLinkServiceConnection.
func (plsc PrivateLinkServiceConnection) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if plsc.PrivateLinkServiceConnectionProperties != nil {
		objectMap["properties"] = plsc.PrivateLinkServiceConnectionProperties
	}
	return json.Marshal(objectMap)
}

// UnmarshalJSON is the custom unmarshaler for PrivateLinkServiceConnection struct.
// It is needed to populate the flattened embedded properties struct.
func (plsc *PrivateLinkServiceConnection) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var privateLinkServiceConnectionProperties PrivateLinkServiceConnectionProperties
				err = json.Unmarshal(*v, &privateLinkServiceConnectionProperties)
				if err != nil {
					return err
				}
				plsc.PrivateLinkServiceConnectionProperties = &privateLinkServiceConnectionProperties
			}
		}
	}

	return nil
}

// PrivateLinkServiceConnectionProperties bag of properties defining a privatelinkServiceConnection.
type PrivateLinkServiceConnectionProperties struct {
	// PrivateLinkServiceID - The resource id of the private link service. Required on PUT (CreateOrUpdate) requests.
	PrivateLinkServiceID *string `json:"privateLinkServiceId,omitempty"`
	// GroupIds - The ID(s) of the group(s) obtained from the remote resource that this private endpoint should connect to. Required on PUT (CreateOrUpdate) requests.
	GroupIds *[]string `json:"groupIds,omitempty"`
	// RequestMessage - A message passed to the owner of the remote resource with this connection request. Restricted to 140 chars.
	RequestMessage *string `json:"requestMessage,omitempty"`
	// PrivateLinkServiceConnectionState - A collection of read-only information about the state of the connection to the private remote resource.
	PrivateLinkServiceConnectionState *PrivateLinkConnectionState `json:"privateLinkServiceConnectionState,omitempty"`
}

// ProxyResource the resource model definition for a ARM proxy resource. It will have everything other than
// required location and tags
type ProxyResource struct {
	// ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
	ID *string `json:"id,omitempty"`
	// Name - READ-ONLY; The name of the resource
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
	Type *string `json:"type,omitempty"`
}

// BasicReferenceInputDataSource describes an input data source that contains reference data.
type BasicReferenceInputDataSource interface {
	AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool)
	AsAzureSQLReferenceInputDataSource() (*AzureSQLReferenceInputDataSource, bool)
	AsReferenceInputDataSource() (*ReferenceInputDataSource, bool)
}

// ReferenceInputDataSource describes an input data source that contains reference data.
type ReferenceInputDataSource struct {
	// Type - Possible values include: 'TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource', 'TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob', 'TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase'
	Type TypeBasicReferenceInputDataSource `json:"type,omitempty"`
}

// unmarshalBasicReferenceInputDataSource decodes body into the concrete
// reference-input type selected by the "type" discriminator, falling back to
// the base ReferenceInputDataSource for unknown discriminators.
func unmarshalBasicReferenceInputDataSource(body []byte) (BasicReferenceInputDataSource, error) {
	var m map[string]interface{}
	err := json.Unmarshal(body, &m)
	if err != nil {
		return nil, err
	}

	switch m["type"] {
	case string(TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob):
		var brids BlobReferenceInputDataSource
		err := json.Unmarshal(body, &brids)
		return brids, err
	case string(TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase):
		var asrids AzureSQLReferenceInputDataSource
		err := json.Unmarshal(body, &asrids)
		return asrids, err
	default:
		var rids ReferenceInputDataSource
		err := json.Unmarshal(body, &rids)
		return rids, err
	}
}

// unmarshalBasicReferenceInputDataSourceArray decodes a JSON array, dispatching
// each element through unmarshalBasicReferenceInputDataSource.
func unmarshalBasicReferenceInputDataSourceArray(body []byte) ([]BasicReferenceInputDataSource, error) {
	var rawMessages []*json.RawMessage
	err := json.Unmarshal(body, &rawMessages)
	if err != nil {
		return nil, err
	}

	ridsArray := make([]BasicReferenceInputDataSource, len(rawMessages))

	for index, rawMessage := range rawMessages {
		rids, err := unmarshalBasicReferenceInputDataSource(*rawMessage)
		if err != nil {
			return nil, err
		}
		ridsArray[index] = rids
	}
	return ridsArray, nil
}

// MarshalJSON is the custom marshaler for ReferenceInputDataSource.
func (rids ReferenceInputDataSource) MarshalJSON() ([]byte, error) {
	// Force the discriminator so the wire form identifies as the base type.
	rids.Type = TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource
	objectMap := make(map[string]interface{})
	if rids.Type != "" {
		objectMap["type"] = rids.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource.
func (rids ReferenceInputDataSource) AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) {
	return nil, false
}

// AsAzureSQLReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource.
func (rids ReferenceInputDataSource) AsAzureSQLReferenceInputDataSource() (*AzureSQLReferenceInputDataSource, bool) {
	return nil, false
}

// AsReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource.
func (rids ReferenceInputDataSource) AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) {
	return &rids, true
}

// AsBasicReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource.
func (rids ReferenceInputDataSource) AsBasicReferenceInputDataSource() (BasicReferenceInputDataSource, bool) {
	return &rids, true
}

// ReferenceInputProperties the properties that are associated with an input containing reference data.
type ReferenceInputProperties struct {
	// Datasource - Describes an input data source that contains reference data. Required on PUT (CreateOrReplace) requests.
	Datasource BasicReferenceInputDataSource `json:"datasource,omitempty"`
	// Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests.
	Serialization BasicSerialization `json:"serialization,omitempty"`
	// Diagnostics - READ-ONLY; Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention.
	Diagnostics *Diagnostics `json:"diagnostics,omitempty"`
	// Etag - READ-ONLY; The current entity tag for the input. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency.
	Etag *string `json:"etag,omitempty"`
	// Compression - Describes how input data is compressed.
	Compression *Compression `json:"compression,omitempty"`
	// PartitionKey - partitionKey Describes a key in the input data which is used for partitioning the input data
	PartitionKey *string `json:"partitionKey,omitempty"`
	// Type - Possible values include: 'TypeInputProperties', 'TypeStream', 'TypeReference'
	Type TypeBasicInputProperties `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for ReferenceInputProperties.
func (rip ReferenceInputProperties) MarshalJSON() ([]byte, error) {
	// Force the discriminator so the wire form always identifies as Reference.
	rip.Type = TypeReference
	objectMap := make(map[string]interface{})
	// Datasource and Serialization are polymorphic interfaces; they are always
	// emitted. READ-ONLY fields (Diagnostics, Etag) are deliberately omitted.
	objectMap["datasource"] = rip.Datasource
	objectMap["serialization"] = rip.Serialization
	if rip.Compression != nil {
		objectMap["compression"] = rip.Compression
	}
	if rip.PartitionKey != nil {
		objectMap["partitionKey"] = rip.PartitionKey
	}
	if rip.Type != "" {
		objectMap["type"] = rip.Type
	}
	return json.Marshal(objectMap)
}

// AsStreamInputProperties is the BasicInputProperties implementation for ReferenceInputProperties.
func (rip ReferenceInputProperties) AsStreamInputProperties() (*StreamInputProperties, bool) {
	return nil, false
}

// AsReferenceInputProperties is the BasicInputProperties implementation for ReferenceInputProperties.
func (rip ReferenceInputProperties) AsReferenceInputProperties() (*ReferenceInputProperties, bool) {
	return &rip, true
}

// AsInputProperties is the BasicInputProperties implementation for ReferenceInputProperties.
func (rip ReferenceInputProperties) AsInputProperties() (*InputProperties, bool) {
	return nil, false
}

// AsBasicInputProperties is the BasicInputProperties implementation for ReferenceInputProperties.
func (rip ReferenceInputProperties) AsBasicInputProperties() (BasicInputProperties, bool) {
	return &rip, true
}

// UnmarshalJSON is the custom unmarshaler for ReferenceInputProperties struct.
// It is needed because Datasource and Serialization are polymorphic and must be
// dispatched on their embedded "type" discriminator.
func (rip *ReferenceInputProperties) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "datasource":
			if v != nil {
				datasource, err := unmarshalBasicReferenceInputDataSource(*v)
				if err != nil {
					return err
				}
				rip.Datasource = datasource
			}
		case "serialization":
			if v != nil {
				serialization, err := unmarshalBasicSerialization(*v)
				if err != nil {
					return err
				}
				rip.Serialization = serialization
			}
		case "diagnostics":
			if v != nil {
				var diagnostics Diagnostics
				err = json.Unmarshal(*v, &diagnostics)
				if err != nil {
					return err
				}
				rip.Diagnostics = &diagnostics
			}
		case "etag":
			if v != nil {
				var etag string
				err = json.Unmarshal(*v, &etag)
				if err != nil {
					return err
				}
				rip.Etag = &etag
			}
		case "compression":
			if v != nil {
				var compression Compression
				err = json.Unmarshal(*v, &compression)
				if err != nil {
					return err
				}
				rip.Compression = &compression
			}
		case "partitionKey":
			if v != nil {
				var partitionKey string
				err = json.Unmarshal(*v, &partitionKey)
				if err != nil {
					return err
				}
				rip.PartitionKey = &partitionKey
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicInputProperties
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
return err + } + rip.Type = typeVar + } + } + } + + return nil +} + +// Resource ... +type Resource struct { + // ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; The name of the resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + Type *string `json:"type,omitempty"` +} + +// ResourceTestStatus describes the status of the test operation along with error information, if applicable. +type ResourceTestStatus struct { + autorest.Response `json:"-"` + // Status - READ-ONLY; The status of the test operation. + Status *string `json:"status,omitempty"` + // Error - READ-ONLY; Describes the error that occurred. + Error *ErrorResponse `json:"error,omitempty"` +} + +// ScalarFunctionProperties the properties that are associated with a scalar function. +type ScalarFunctionProperties struct { + // Etag - READ-ONLY; The current entity tag for the function. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. + Etag *string `json:"etag,omitempty"` + *FunctionConfiguration `json:"properties,omitempty"` + // Type - Possible values include: 'TypeFunctionProperties', 'TypeScalar', 'TypeAggregate' + Type TypeBasicFunctionProperties `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ScalarFunctionProperties. 
func (sfp ScalarFunctionProperties) MarshalJSON() ([]byte, error) {
	// Pin the discriminator for this concrete type before serializing.
	sfp.Type = TypeScalar
	objectMap := make(map[string]interface{})
	if sfp.FunctionConfiguration != nil {
		objectMap["properties"] = sfp.FunctionConfiguration
	}
	if sfp.Type != "" {
		objectMap["type"] = sfp.Type
	}
	// Etag is READ-ONLY and intentionally omitted from the payload.
	return json.Marshal(objectMap)
}

// AsScalarFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties.
func (sfp ScalarFunctionProperties) AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) {
	return &sfp, true
}

// AsAggregateFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties; always (nil, false).
func (sfp ScalarFunctionProperties) AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) {
	return nil, false
}

// AsFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties; always (nil, false).
func (sfp ScalarFunctionProperties) AsFunctionProperties() (*FunctionProperties, bool) {
	return nil, false
}

// AsBasicFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties.
func (sfp ScalarFunctionProperties) AsBasicFunctionProperties() (BasicFunctionProperties, bool) {
	return &sfp, true
}

// BasicSerialization describes how data from an input is serialized or how data is serialized when written to an
// output.
type BasicSerialization interface {
	AsParquetSerialization() (*ParquetSerialization, bool)
	AsCustomClrSerialization() (*CustomClrSerialization, bool)
	AsCsvSerialization() (*CsvSerialization, bool)
	AsJSONSerialization() (*JSONSerialization, bool)
	AsAvroSerialization() (*AvroSerialization, bool)
	AsSerialization() (*Serialization, bool)
}

// Serialization describes how data from an input is serialized or how data is serialized when written to an
// output.
type Serialization struct {
	// Type - Possible values include: 'TypeSerialization', 'TypeParquet', 'TypeCustomClr', 'TypeCsv', 'TypeJSON', 'TypeAvro'
	Type TypeBasicSerialization `json:"type,omitempty"`
}

// unmarshalBasicSerialization decodes body into the concrete BasicSerialization
// implementation selected by the "type" discriminator, falling back to the base
// Serialization type when the discriminator is absent or unrecognized.
func unmarshalBasicSerialization(body []byte) (BasicSerialization, error) {
	var m map[string]interface{}
	err := json.Unmarshal(body, &m)
	if err != nil {
		return nil, err
	}

	switch m["type"] {
	case string(TypeParquet):
		var ps ParquetSerialization
		err := json.Unmarshal(body, &ps)
		return ps, err
	case string(TypeCustomClr):
		var ccs CustomClrSerialization
		err := json.Unmarshal(body, &ccs)
		return ccs, err
	case string(TypeCsv):
		var cs CsvSerialization
		err := json.Unmarshal(body, &cs)
		return cs, err
	case string(TypeJSON):
		var js JSONSerialization
		err := json.Unmarshal(body, &js)
		return js, err
	case string(TypeAvro):
		var as AvroSerialization
		err := json.Unmarshal(body, &as)
		return as, err
	default:
		var s Serialization
		err := json.Unmarshal(body, &s)
		return s, err
	}
}

// unmarshalBasicSerializationArray decodes a JSON array by applying
// unmarshalBasicSerialization to each element.
func unmarshalBasicSerializationArray(body []byte) ([]BasicSerialization, error) {
	var rawMessages []*json.RawMessage
	err := json.Unmarshal(body, &rawMessages)
	if err != nil {
		return nil, err
	}

	sArray := make([]BasicSerialization, len(rawMessages))

	for index, rawMessage := range rawMessages {
		s, err := unmarshalBasicSerialization(*rawMessage)
		if err != nil {
			return nil, err
		}
		sArray[index] = s
	}
	return sArray, nil
}

// MarshalJSON is the custom marshaler for Serialization.
// It pins the "type" discriminator to TypeSerialization.
func (s Serialization) MarshalJSON() ([]byte, error) {
	s.Type = TypeSerialization
	objectMap := make(map[string]interface{})
	if s.Type != "" {
		objectMap["type"] = s.Type
	}
	return json.Marshal(objectMap)
}

// AsParquetSerialization is the BasicSerialization implementation for Serialization.
func (s Serialization) AsParquetSerialization() (*ParquetSerialization, bool) {
	return nil, false
}

// AsCustomClrSerialization is the BasicSerialization implementation for Serialization; always (nil, false).
func (s Serialization) AsCustomClrSerialization() (*CustomClrSerialization, bool) {
	return nil, false
}

// AsCsvSerialization is the BasicSerialization implementation for Serialization; always (nil, false).
func (s Serialization) AsCsvSerialization() (*CsvSerialization, bool) {
	return nil, false
}

// AsJSONSerialization is the BasicSerialization implementation for Serialization; always (nil, false).
func (s Serialization) AsJSONSerialization() (*JSONSerialization, bool) {
	return nil, false
}

// AsAvroSerialization is the BasicSerialization implementation for Serialization; always (nil, false).
func (s Serialization) AsAvroSerialization() (*AvroSerialization, bool) {
	return nil, false
}

// AsSerialization is the BasicSerialization implementation for Serialization.
func (s Serialization) AsSerialization() (*Serialization, bool) {
	return &s, true
}

// AsBasicSerialization is the BasicSerialization implementation for Serialization.
func (s Serialization) AsBasicSerialization() (BasicSerialization, bool) {
	return &s, true
}

// ServiceBusDataSourceProperties the common properties that are associated with Service Bus data sources
// (Queues, Topics, Event Hubs, etc.).
type ServiceBusDataSourceProperties struct {
	// ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"`
	// SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"`
	// SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"`
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
}

// ServiceBusQueueOutputDataSource describes a Service Bus Queue output data source.
type ServiceBusQueueOutputDataSource struct {
	// ServiceBusQueueOutputDataSourceProperties - The properties that are associated with a Service Bus Queue output. Required on PUT (CreateOrReplace) requests.
	*ServiceBusQueueOutputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for ServiceBusQueueOutputDataSource.
// It pins the "type" discriminator to TypeMicrosoftServiceBusQueue.
func (sbqods ServiceBusQueueOutputDataSource) MarshalJSON() ([]byte, error) {
	sbqods.Type = TypeMicrosoftServiceBusQueue
	objectMap := make(map[string]interface{})
	if sbqods.ServiceBusQueueOutputDataSourceProperties != nil {
		objectMap["properties"] = sbqods.ServiceBusQueueOutputDataSourceProperties
	}
	if sbqods.Type != "" {
		objectMap["type"] = sbqods.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource.
func (sbqods ServiceBusQueueOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
	return nil, false
}

// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
	return nil, false
}

// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
	return nil, false
}

// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
	return nil, false
}

// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
	return nil, false
}

// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
	return nil, false
}

// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
	return nil, false
}

// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource;
// this is the matching concrete type, so it returns the receiver and true.
func (sbqods ServiceBusQueueOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
	return &sbqods, true
}

// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
	return nil, false
}

// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
	return nil, false
}

// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
	return nil, false
}

// AsOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource; always (nil, false).
func (sbqods ServiceBusQueueOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
	return nil, false
}

// AsBasicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource.
func (sbqods ServiceBusQueueOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
	return &sbqods, true
}

// UnmarshalJSON is the custom unmarshaler for ServiceBusQueueOutputDataSource struct.
func (sbqods *ServiceBusQueueOutputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var serviceBusQueueOutputDataSourceProperties ServiceBusQueueOutputDataSourceProperties
				err = json.Unmarshal(*v, &serviceBusQueueOutputDataSourceProperties)
				if err != nil {
					return err
				}
				sbqods.ServiceBusQueueOutputDataSourceProperties = &serviceBusQueueOutputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicOutputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				sbqods.Type = typeVar
			}
		}
	}

	return nil
}

// ServiceBusQueueOutputDataSourceProperties the properties that are associated with a Service Bus Queue
// output.
type ServiceBusQueueOutputDataSourceProperties struct {
	// QueueName - The name of the Service Bus Queue. Required on PUT (CreateOrReplace) requests.
	QueueName *string `json:"queueName,omitempty"`
	// PropertyColumns - A string array of the names of output columns to be attached to Service Bus messages as custom properties.
	PropertyColumns *[]string `json:"propertyColumns,omitempty"`
	// SystemPropertyColumns - NOTE(review): presumably maps Service Bus system property names to output columns; confirm against the service API.
	// No omitempty on the tag: the custom MarshalJSON below already omits the field when nil.
	SystemPropertyColumns map[string]*string `json:"systemPropertyColumns"`
	// ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"`
	// SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"`
	// SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"`
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
}

// MarshalJSON is the custom marshaler for ServiceBusQueueOutputDataSourceProperties.
// Each field is emitted only when set, so nil/zero values are left out of the payload.
func (sbqodsp ServiceBusQueueOutputDataSourceProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if sbqodsp.QueueName != nil {
		objectMap["queueName"] = sbqodsp.QueueName
	}
	if sbqodsp.PropertyColumns != nil {
		objectMap["propertyColumns"] = sbqodsp.PropertyColumns
	}
	if sbqodsp.SystemPropertyColumns != nil {
		objectMap["systemPropertyColumns"] = sbqodsp.SystemPropertyColumns
	}
	if sbqodsp.ServiceBusNamespace != nil {
		objectMap["serviceBusNamespace"] = sbqodsp.ServiceBusNamespace
	}
	if sbqodsp.SharedAccessPolicyName != nil {
		objectMap["sharedAccessPolicyName"] = sbqodsp.SharedAccessPolicyName
	}
	if sbqodsp.SharedAccessPolicyKey != nil {
		objectMap["sharedAccessPolicyKey"] = sbqodsp.SharedAccessPolicyKey
	}
	if sbqodsp.AuthenticationMode != "" {
		objectMap["authenticationMode"] = sbqodsp.AuthenticationMode
	}
	return json.Marshal(objectMap)
}

// ServiceBusTopicOutputDataSource describes a Service Bus Topic output data source.
type ServiceBusTopicOutputDataSource struct {
	// ServiceBusTopicOutputDataSourceProperties - The properties that are associated with a Service Bus Topic output. Required on PUT (CreateOrReplace) requests.
	*ServiceBusTopicOutputDataSourceProperties `json:"properties,omitempty"`
	// Type - Possible values include: 'TypeOutputDataSource', 'TypeMicrosoftStorageBlob', 'TypeMicrosoftStorageTable', 'TypeMicrosoftServiceBusEventHub', 'TypeMicrosoftEventHubEventHub', 'TypeMicrosoftSQLServerDatabase', 'TypeMicrosoftSQLServerDataWarehouse', 'TypeMicrosoftStorageDocumentDB', 'TypeMicrosoftAzureFunction', 'TypeMicrosoftServiceBusQueue', 'TypeMicrosoftServiceBusTopic', 'TypePowerBI', 'TypeMicrosoftDataLakeAccounts'
	Type TypeBasicOutputDataSource `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for ServiceBusTopicOutputDataSource.
// It pins the "type" discriminator to TypeMicrosoftServiceBusTopic.
func (sbtods ServiceBusTopicOutputDataSource) MarshalJSON() ([]byte, error) {
	sbtods.Type = TypeMicrosoftServiceBusTopic
	objectMap := make(map[string]interface{})
	if sbtods.ServiceBusTopicOutputDataSourceProperties != nil {
		objectMap["properties"] = sbtods.ServiceBusTopicOutputDataSourceProperties
	}
	if sbtods.Type != "" {
		objectMap["type"] = sbtods.Type
	}
	return json.Marshal(objectMap)
}

// AsBlobOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
	return nil, false
}

// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
	return nil, false
}

// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
	return nil, false
}

// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
	return nil, false
}

// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
	return nil, false
}

// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
	return nil, false
}

// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
	return nil, false
}

// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
	return nil, false
}

// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource;
// this is the matching concrete type, so it returns the receiver and true.
func (sbtods ServiceBusTopicOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
	return &sbtods, true
}

// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
	return nil, false
}

// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
	return nil, false
}

// AsOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource; always (nil, false).
func (sbtods ServiceBusTopicOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
	return nil, false
}

// AsBasicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource.
func (sbtods ServiceBusTopicOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
	return &sbtods, true
}

// UnmarshalJSON is the custom unmarshaler for ServiceBusTopicOutputDataSource struct.
// It decodes the flattened "properties" object and the "type" discriminator; other keys are ignored.
func (sbtods *ServiceBusTopicOutputDataSource) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var serviceBusTopicOutputDataSourceProperties ServiceBusTopicOutputDataSourceProperties
				err = json.Unmarshal(*v, &serviceBusTopicOutputDataSourceProperties)
				if err != nil {
					return err
				}
				sbtods.ServiceBusTopicOutputDataSourceProperties = &serviceBusTopicOutputDataSourceProperties
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicOutputDataSource
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				sbtods.Type = typeVar
			}
		}
	}

	return nil
}

// ServiceBusTopicOutputDataSourceProperties the properties that are associated with a Service Bus Topic
// output.
type ServiceBusTopicOutputDataSourceProperties struct {
	// TopicName - The name of the Service Bus Topic. Required on PUT (CreateOrReplace) requests.
	TopicName *string `json:"topicName,omitempty"`
	// PropertyColumns - A string array of the names of output columns to be attached to Service Bus messages as custom properties.
	PropertyColumns *[]string `json:"propertyColumns,omitempty"`
	// SystemPropertyColumns - NOTE(review): presumably maps Service Bus system property names to output columns; confirm against the service API.
	// No omitempty on the tag: the custom MarshalJSON below already omits the field when nil.
	SystemPropertyColumns map[string]*string `json:"systemPropertyColumns"`
	// ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"`
	// SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"`
	// SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests.
	SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"`
	// AuthenticationMode - Authentication Mode. Possible values include: 'Msi', 'UserToken', 'ConnectionString'
	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
}

// MarshalJSON is the custom marshaler for ServiceBusTopicOutputDataSourceProperties.
// Each field is emitted only when set, so nil/zero values are left out of the payload.
func (sbtodsp ServiceBusTopicOutputDataSourceProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if sbtodsp.TopicName != nil {
		objectMap["topicName"] = sbtodsp.TopicName
	}
	if sbtodsp.PropertyColumns != nil {
		objectMap["propertyColumns"] = sbtodsp.PropertyColumns
	}
	if sbtodsp.SystemPropertyColumns != nil {
		objectMap["systemPropertyColumns"] = sbtodsp.SystemPropertyColumns
	}
	if sbtodsp.ServiceBusNamespace != nil {
		objectMap["serviceBusNamespace"] = sbtodsp.ServiceBusNamespace
	}
	if sbtodsp.SharedAccessPolicyName != nil {
		objectMap["sharedAccessPolicyName"] = sbtodsp.SharedAccessPolicyName
	}
	if sbtodsp.SharedAccessPolicyKey != nil {
		objectMap["sharedAccessPolicyKey"] = sbtodsp.SharedAccessPolicyKey
	}
	if sbtodsp.AuthenticationMode != "" {
		objectMap["authenticationMode"] = sbtodsp.AuthenticationMode
	}
	return json.Marshal(objectMap)
}

// StartStreamingJobParameters parameters supplied to the Start Streaming Job operation.
type StartStreamingJobParameters struct {
	// OutputStartMode - Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output time. Possible values include: 'JobStartTime', 'CustomTime', 'LastOutputEventTime'
	OutputStartMode OutputStartMode `json:"outputStartMode,omitempty"`
	// OutputStartTime - Value is either an ISO-8601 formatted time stamp that indicates the starting point of the output event stream, or null to indicate that the output event stream will start whenever the streaming job is started. This property must have a value if outputStartMode is set to CustomTime.
	OutputStartTime *date.Time `json:"outputStartTime,omitempty"`
}

// StorageAccount the properties that are associated with an Azure Storage account
type StorageAccount struct {
	// AccountName - The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests.
	AccountName *string `json:"accountName,omitempty"`
	// AccountKey - The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests.
	AccountKey *string `json:"accountKey,omitempty"`
}

// StreamingJob a streaming job object, containing all information associated with the named streaming job.
type StreamingJob struct {
	autorest.Response `json:"-"`
	// StreamingJobProperties - The properties that are associated with a streaming job. Required on PUT (CreateOrReplace) requests.
	*StreamingJobProperties `json:"properties,omitempty"`
	// Identity - Describes the system-assigned managed identity assigned to this job that can be used to authenticate with inputs and outputs.
	Identity *Identity `json:"identity,omitempty"`
	// Tags - Resource tags.
	Tags map[string]*string `json:"tags"`
	// Location - The geo-location where the resource lives
	Location *string `json:"location,omitempty"`
	// ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
	ID *string `json:"id,omitempty"`
	// Name - READ-ONLY; The name of the resource
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
	Type *string `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for StreamingJob.
// The READ-ONLY fields (ID, Name, Type) are intentionally omitted from the payload.
func (sj StreamingJob) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if sj.StreamingJobProperties != nil {
		objectMap["properties"] = sj.StreamingJobProperties
	}
	if sj.Identity != nil {
		objectMap["identity"] = sj.Identity
	}
	if sj.Tags != nil {
		objectMap["tags"] = sj.Tags
	}
	if sj.Location != nil {
		objectMap["location"] = sj.Location
	}
	return json.Marshal(objectMap)
}

// UnmarshalJSON is the custom unmarshaler for StreamingJob struct.
// It decodes the flattened "properties" object plus the standard ARM envelope fields.
func (sj *StreamingJob) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var streamingJobProperties StreamingJobProperties
				err = json.Unmarshal(*v, &streamingJobProperties)
				if err != nil {
					return err
				}
				sj.StreamingJobProperties = &streamingJobProperties
			}
		case "identity":
			if v != nil {
				var identity Identity
				err = json.Unmarshal(*v, &identity)
				if err != nil {
					return err
				}
				sj.Identity = &identity
			}
		case "tags":
			if v != nil {
				var tags map[string]*string
				err = json.Unmarshal(*v, &tags)
				if err != nil {
					return err
				}
				sj.Tags = tags
			}
		case "location":
			if v != nil {
				var location string
				err = json.Unmarshal(*v, &location)
				if err != nil {
					return err
				}
				sj.Location = &location
			}
		case "id":
			if v != nil {
				var ID string
				err = json.Unmarshal(*v, &ID)
				if err != nil {
					return err
				}
				sj.ID = &ID
			}
		case "name":
			if v != nil {
				var name string
				err = json.Unmarshal(*v, &name)
				if err != nil {
					return err
				}
				sj.Name = &name
			}
		case "type":
			if v != nil {
				var typeVar string
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				sj.Type = &typeVar
			}
		}
	}

	return nil
}

// StreamingJobListResult object containing a list of streaming jobs.
+type StreamingJobListResult struct { + autorest.Response `json:"-"` + // Value - READ-ONLY; A list of streaming jobs. Populated by a 'List' operation. + Value *[]StreamingJob `json:"value,omitempty"` + // NextLink - READ-ONLY; The link (url) to the next page of results. + NextLink *string `json:"nextLink,omitempty"` +} + +// StreamingJobListResultIterator provides access to a complete listing of StreamingJob values. +type StreamingJobListResultIterator struct { + i int + page StreamingJobListResultPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *StreamingJobListResultIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobListResultIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *StreamingJobListResultIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter StreamingJobListResultIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. 
+func (iter StreamingJobListResultIterator) Response() StreamingJobListResult { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter StreamingJobListResultIterator) Value() StreamingJob { + if !iter.page.NotDone() { + return StreamingJob{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the StreamingJobListResultIterator type. +func NewStreamingJobListResultIterator(page StreamingJobListResultPage) StreamingJobListResultIterator { + return StreamingJobListResultIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (sjlr StreamingJobListResult) IsEmpty() bool { + return sjlr.Value == nil || len(*sjlr.Value) == 0 +} + +// hasNextLink returns true if the NextLink is not empty. +func (sjlr StreamingJobListResult) hasNextLink() bool { + return sjlr.NextLink != nil && len(*sjlr.NextLink) != 0 +} + +// streamingJobListResultPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (sjlr StreamingJobListResult) streamingJobListResultPreparer(ctx context.Context) (*http.Request, error) { + if !sjlr.hasNextLink() { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(sjlr.NextLink))) +} + +// StreamingJobListResultPage contains a page of StreamingJob values. +type StreamingJobListResultPage struct { + fn func(context.Context, StreamingJobListResult) (StreamingJobListResult, error) + sjlr StreamingJobListResult +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. 
// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *StreamingJobListResultPage) NextWithContext(ctx context.Context) (err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobListResultPage.NextWithContext")
		defer func() {
			sc := -1
			if page.Response().Response.Response != nil {
				sc = page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Keep fetching until a non-empty page is found or the final (no NextLink)
	// page is reached; empty intermediate pages are skipped transparently.
	for {
		next, err := page.fn(ctx, page.sjlr)
		if err != nil {
			return err
		}
		page.sjlr = next
		if !next.hasNextLink() || !next.IsEmpty() {
			break
		}
	}
	return nil
}

// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *StreamingJobListResultPage) Next() error {
	return page.NextWithContext(context.Background())
}

// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page StreamingJobListResultPage) NotDone() bool {
	return !page.sjlr.IsEmpty()
}

// Response returns the raw server response from the last page request.
func (page StreamingJobListResultPage) Response() StreamingJobListResult {
	return page.sjlr
}

// Values returns the slice of values for the current page or nil if there are no values.
func (page StreamingJobListResultPage) Values() []StreamingJob {
	if page.sjlr.IsEmpty() {
		return nil
	}
	return *page.sjlr.Value
}

// NewStreamingJobListResultPage creates a new instance of the StreamingJobListResultPage type.
func NewStreamingJobListResultPage(getNextPage func(context.Context, StreamingJobListResult) (StreamingJobListResult, error)) StreamingJobListResultPage {
	return StreamingJobListResultPage{fn: getNextPage}
}

// StreamingJobProperties the properties that are associated with a streaming job.
type StreamingJobProperties struct {
	// Sku - Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests.
	Sku *StreamingJobSku `json:"sku,omitempty"`
	// JobID - READ-ONLY; A GUID uniquely identifying the streaming job. This GUID is generated upon creation of the streaming job.
	JobID *string `json:"jobId,omitempty"`
	// ProvisioningState - READ-ONLY; Describes the provisioning status of the streaming job.
	ProvisioningState *string `json:"provisioningState,omitempty"`
	// JobState - READ-ONLY; Describes the state of the streaming job.
	JobState *string `json:"jobState,omitempty"`
	// JobType - Describes the type of the job. Valid modes are `Cloud` and 'Edge'. Possible values include: 'Cloud', 'Edge'
	JobType JobType `json:"jobType,omitempty"`
	// OutputStartMode - This property should only be utilized when it is desired that the job be started immediately upon creation. Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output time. Possible values include: 'JobStartTime', 'CustomTime', 'LastOutputEventTime'
	OutputStartMode OutputStartMode `json:"outputStartMode,omitempty"`
	// OutputStartTime - Value is either an ISO-8601 formatted time stamp that indicates the starting point of the output event stream, or null to indicate that the output event stream will start whenever the streaming job is started. This property must have a value if outputStartMode is set to CustomTime.
	OutputStartTime *date.Time `json:"outputStartTime,omitempty"`
	// LastOutputEventTime - READ-ONLY; Value is either an ISO-8601 formatted timestamp indicating the last output event time of the streaming job or null indicating that output has not yet been produced. In case of multiple outputs or multiple streams, this shows the latest value in that set.
	LastOutputEventTime *date.Time `json:"lastOutputEventTime,omitempty"`
	// EventsOutOfOrderPolicy - Indicates the policy to apply to events that arrive out of order in the input event stream. Possible values include: 'Adjust', 'Drop'
	EventsOutOfOrderPolicy EventsOutOfOrderPolicy `json:"eventsOutOfOrderPolicy,omitempty"`
	// OutputErrorPolicy - Indicates the policy to apply to events that arrive at the output and cannot be written to the external storage due to being malformed (missing column values, column values of wrong type or size). Possible values include: 'OutputErrorPolicyStop', 'OutputErrorPolicyDrop'
	OutputErrorPolicy OutputErrorPolicy `json:"outputErrorPolicy,omitempty"`
	// EventsOutOfOrderMaxDelayInSeconds - The maximum tolerable delay in seconds where out-of-order events can be adjusted to be back in order.
	EventsOutOfOrderMaxDelayInSeconds *int32 `json:"eventsOutOfOrderMaxDelayInSeconds,omitempty"`
	// EventsLateArrivalMaxDelayInSeconds - The maximum tolerable delay in seconds where events arriving late could be included. Supported range is -1 to 1814399 (20.23:59:59 days) and -1 is used to specify wait indefinitely. If the property is absent, it is interpreted to have a value of -1.
	EventsLateArrivalMaxDelayInSeconds *int32 `json:"eventsLateArrivalMaxDelayInSeconds,omitempty"`
	// DataLocale - The data locale of the stream analytics job. Value should be the name of a supported .NET Culture from the set https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none specified.
	DataLocale *string `json:"dataLocale,omitempty"`
	// CompatibilityLevel - Controls certain runtime behaviors of the streaming job. Possible values include: 'OneFullStopZero'
	CompatibilityLevel CompatibilityLevel `json:"compatibilityLevel,omitempty"`
	// CreatedDate - READ-ONLY; Value is an ISO-8601 formatted UTC timestamp indicating when the streaming job was created.
	CreatedDate *date.Time `json:"createdDate,omitempty"`
	// Inputs - A list of one or more inputs to the streaming job. The name property for each input is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual input.
	Inputs *[]Input `json:"inputs,omitempty"`
	// Transformation - Indicates the query and the number of streaming units to use for the streaming job. The name property of the transformation is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual transformation.
	Transformation *Transformation `json:"transformation,omitempty"`
	// Outputs - A list of one or more outputs for the streaming job. The name property for each output is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual output.
	Outputs *[]Output `json:"outputs,omitempty"`
	// Functions - A list of one or more functions for the streaming job. The name property for each function is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual transformation.
	Functions *[]Function `json:"functions,omitempty"`
	// Etag - READ-ONLY; The current entity tag for the streaming job. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency.
	Etag *string `json:"etag,omitempty"`
	// JobStorageAccount - The storage account associated with the job when ContentStoragePolicy is JobStorageAccount.
	JobStorageAccount *JobStorageAccount `json:"jobStorageAccount,omitempty"`
	// ContentStoragePolicy - READ-ONLY; Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this requires the user to also specify jobStorageAccount property. Possible values include: 'ContentStoragePolicySystemAccount', 'ContentStoragePolicyJobStorageAccount'
	ContentStoragePolicy ContentStoragePolicy `json:"contentStoragePolicy,omitempty"`
	// Externals - The storage account where the custom code artifacts are located.
	Externals *External `json:"externals,omitempty"`
	// Cluster - The cluster which streaming jobs will run on.
	Cluster *ClusterInfo `json:"cluster,omitempty"`
}

// MarshalJSON is the custom marshaler for StreamingJobProperties.
// READ-ONLY fields (JobID, ProvisioningState, JobState, LastOutputEventTime,
// CreatedDate, Etag, ContentStoragePolicy) are intentionally omitted from the
// request payload.
func (sjp StreamingJobProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if sjp.Sku != nil {
		objectMap["sku"] = sjp.Sku
	}
	if sjp.JobType != "" {
		objectMap["jobType"] = sjp.JobType
	}
	if sjp.OutputStartMode != "" {
		objectMap["outputStartMode"] = sjp.OutputStartMode
	}
	if sjp.OutputStartTime != nil {
		objectMap["outputStartTime"] = sjp.OutputStartTime
	}
	if sjp.EventsOutOfOrderPolicy != "" {
		objectMap["eventsOutOfOrderPolicy"] = sjp.EventsOutOfOrderPolicy
	}
	if sjp.OutputErrorPolicy != "" {
		objectMap["outputErrorPolicy"] = sjp.OutputErrorPolicy
	}
	if sjp.EventsOutOfOrderMaxDelayInSeconds != nil {
		objectMap["eventsOutOfOrderMaxDelayInSeconds"] = sjp.EventsOutOfOrderMaxDelayInSeconds
	}
	if sjp.EventsLateArrivalMaxDelayInSeconds != nil {
		objectMap["eventsLateArrivalMaxDelayInSeconds"] = sjp.EventsLateArrivalMaxDelayInSeconds
	}
	if sjp.DataLocale != nil {
		objectMap["dataLocale"] = sjp.DataLocale
	}
	if sjp.CompatibilityLevel != "" {
		objectMap["compatibilityLevel"] = sjp.CompatibilityLevel
	}
	if sjp.Inputs != nil {
		objectMap["inputs"] = sjp.Inputs
	}
	if sjp.Transformation != nil {
		objectMap["transformation"] = sjp.Transformation
	}
	if sjp.Outputs != nil {
		objectMap["outputs"] = sjp.Outputs
	}
	if sjp.Functions != nil {
		objectMap["functions"] = sjp.Functions
	}
	if sjp.JobStorageAccount != nil {
		objectMap["jobStorageAccount"] = sjp.JobStorageAccount
	}
	if sjp.Externals != nil {
		objectMap["externals"] = sjp.Externals
	}
	if sjp.Cluster != nil {
		objectMap["cluster"] = sjp.Cluster
	}
	return json.Marshal(objectMap)
}

// StreamingJobsCreateOrReplaceFuture an abstraction for monitoring and retrieving the results of a
// long-running operation.
type StreamingJobsCreateOrReplaceFuture struct {
	azure.Future
}

// Result returns the result of the asynchronous operation.
// If the operation has not completed it will return an error.
func (future *StreamingJobsCreateOrReplaceFuture) Result(client StreamingJobsClient) (sj StreamingJob, err error) {
	var done bool
	done, err = future.DoneWithContext(context.Background(), client)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsCreateOrReplaceFuture", "Result", future.Response(), "Polling failure")
		return
	}
	if !done {
		err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsCreateOrReplaceFuture")
		return
	}
	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
	// Only unmarshal the body when the final GET returned content.
	if sj.Response.Response, err = future.GetResult(sender); err == nil && sj.Response.Response.StatusCode != http.StatusNoContent {
		sj, err = client.CreateOrReplaceResponder(sj.Response.Response)
		if err != nil {
			err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsCreateOrReplaceFuture", "Result", sj.Response.Response, "Failure responding to request")
		}
	}
	return
}

// StreamingJobsDeleteFuture an abstraction for monitoring and retrieving the results of a long-running
// operation.
type StreamingJobsDeleteFuture struct {
	azure.Future
}

// Result returns the result of the asynchronous operation.
// If the operation has not completed it will return an error.
+func (future *StreamingJobsDeleteFuture) Result(client StreamingJobsClient) (ar autorest.Response, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsDeleteFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsDeleteFuture") + return + } + ar.Response = future.Response() + return +} + +// StreamingJobSku the properties that are associated with a SKU. +type StreamingJobSku struct { + // Name - The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values include: 'Standard' + Name StreamingJobSkuName `json:"name,omitempty"` +} + +// StreamingJobsStartFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type StreamingJobsStartFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *StreamingJobsStartFuture) Result(client StreamingJobsClient) (ar autorest.Response, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsStartFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsStartFuture") + return + } + ar.Response = future.Response() + return +} + +// StreamingJobsStopFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type StreamingJobsStopFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
+func (future *StreamingJobsStopFuture) Result(client StreamingJobsClient) (ar autorest.Response, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsStopFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsStopFuture") + return + } + ar.Response = future.Response() + return +} + +// BasicStreamInputDataSource describes an input data source that contains stream data. +type BasicStreamInputDataSource interface { + AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) + AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) + AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) + AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) + AsStreamInputDataSource() (*StreamInputDataSource, bool) +} + +// StreamInputDataSource describes an input data source that contains stream data. 
+type StreamInputDataSource struct { + // Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs' + Type TypeBasicStreamInputDataSource `json:"type,omitempty"` +} + +func unmarshalBasicStreamInputDataSource(body []byte) (BasicStreamInputDataSource, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["type"] { + case string(TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob): + var bsids BlobStreamInputDataSource + err := json.Unmarshal(body, &bsids) + return bsids, err + case string(TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub): + var ehsids EventHubStreamInputDataSource + err := json.Unmarshal(body, &ehsids) + return ehsids, err + case string(TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub): + var ehvsids EventHubV2StreamInputDataSource + err := json.Unmarshal(body, &ehvsids) + return ehvsids, err + case string(TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs): + var ithsids IoTHubStreamInputDataSource + err := json.Unmarshal(body, &ithsids) + return ithsids, err + default: + var sids StreamInputDataSource + err := json.Unmarshal(body, &sids) + return sids, err + } +} +func unmarshalBasicStreamInputDataSourceArray(body []byte) ([]BasicStreamInputDataSource, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + sidsArray := make([]BasicStreamInputDataSource, len(rawMessages)) + + for index, rawMessage := range rawMessages { + sids, err := unmarshalBasicStreamInputDataSource(*rawMessage) + if err != nil { + return nil, err + } + sidsArray[index] = sids + } + return sidsArray, nil +} + +// 
MarshalJSON is the custom marshaler for StreamInputDataSource. +func (sids StreamInputDataSource) MarshalJSON() ([]byte, error) { + sids.Type = TypeBasicStreamInputDataSourceTypeStreamInputDataSource + objectMap := make(map[string]interface{}) + if sids.Type != "" { + objectMap["type"] = sids.Type + } + return json.Marshal(objectMap) +} + +// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) { + return nil, false +} + +// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) { + return nil, false +} + +// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) { + return nil, false +} + +// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) { + return nil, false +} + +// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) { + return &sids, true +} + +// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. +func (sids StreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) { + return &sids, true +} + +// StreamInputProperties the properties that are associated with an input containing stream data. +type StreamInputProperties struct { + // Datasource - Describes an input data source that contains stream data. 
Required on PUT (CreateOrReplace) requests. + Datasource BasicStreamInputDataSource `json:"datasource,omitempty"` + // Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. + Serialization BasicSerialization `json:"serialization,omitempty"` + // Diagnostics - READ-ONLY; Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + // Etag - READ-ONLY; The current entity tag for the input. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. + Etag *string `json:"etag,omitempty"` + Compression *Compression `json:"compression,omitempty"` + // PartitionKey - partitionKey Describes a key in the input data which is used for partitioning the input data + PartitionKey *string `json:"partitionKey,omitempty"` + // Type - Possible values include: 'TypeInputProperties', 'TypeStream', 'TypeReference' + Type TypeBasicInputProperties `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for StreamInputProperties. +func (sip StreamInputProperties) MarshalJSON() ([]byte, error) { + sip.Type = TypeStream + objectMap := make(map[string]interface{}) + objectMap["datasource"] = sip.Datasource + objectMap["serialization"] = sip.Serialization + if sip.Compression != nil { + objectMap["compression"] = sip.Compression + } + if sip.PartitionKey != nil { + objectMap["partitionKey"] = sip.PartitionKey + } + if sip.Type != "" { + objectMap["type"] = sip.Type + } + return json.Marshal(objectMap) +} + +// AsStreamInputProperties is the BasicInputProperties implementation for StreamInputProperties. 
// AsStreamInputProperties is the BasicInputProperties implementation for StreamInputProperties.
func (sip StreamInputProperties) AsStreamInputProperties() (*StreamInputProperties, bool) {
	return &sip, true
}

// AsReferenceInputProperties is the BasicInputProperties implementation for StreamInputProperties.
func (sip StreamInputProperties) AsReferenceInputProperties() (*ReferenceInputProperties, bool) {
	return nil, false
}

// AsInputProperties is the BasicInputProperties implementation for StreamInputProperties.
func (sip StreamInputProperties) AsInputProperties() (*InputProperties, bool) {
	return nil, false
}

// AsBasicInputProperties is the BasicInputProperties implementation for StreamInputProperties.
func (sip StreamInputProperties) AsBasicInputProperties() (BasicInputProperties, bool) {
	return &sip, true
}

// UnmarshalJSON is the custom unmarshaler for StreamInputProperties struct.
// The polymorphic Datasource and Serialization fields are dispatched through
// their discriminator-aware helpers; all other fields are decoded directly.
func (sip *StreamInputProperties) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "datasource":
			if v != nil {
				datasource, err := unmarshalBasicStreamInputDataSource(*v)
				if err != nil {
					return err
				}
				sip.Datasource = datasource
			}
		case "serialization":
			if v != nil {
				serialization, err := unmarshalBasicSerialization(*v)
				if err != nil {
					return err
				}
				sip.Serialization = serialization
			}
		case "diagnostics":
			if v != nil {
				var diagnostics Diagnostics
				err = json.Unmarshal(*v, &diagnostics)
				if err != nil {
					return err
				}
				sip.Diagnostics = &diagnostics
			}
		case "etag":
			if v != nil {
				var etag string
				err = json.Unmarshal(*v, &etag)
				if err != nil {
					return err
				}
				sip.Etag = &etag
			}
		case "compression":
			if v != nil {
				var compression Compression
				err = json.Unmarshal(*v, &compression)
				if err != nil {
					return err
				}
				sip.Compression = &compression
			}
		case "partitionKey":
			if v != nil {
				var partitionKey string
				err = json.Unmarshal(*v, &partitionKey)
				if err != nil {
					return err
				}
				sip.PartitionKey = &partitionKey
			}
		case "type":
			if v != nil {
				var typeVar TypeBasicInputProperties
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				sip.Type = typeVar
			}
		}
	}

	return nil
}

// SubResource the base sub-resource model definition.
type SubResource struct {
	// ID - READ-ONLY; Resource Id
	ID *string `json:"id,omitempty"`
	// Name - Resource name
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; Resource type
	Type *string `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for SubResource.
// Only the writable Name field is serialized; ID and Type are READ-ONLY.
func (sr SubResource) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if sr.Name != nil {
		objectMap["name"] = sr.Name
	}
	return json.Marshal(objectMap)
}

// SubscriptionQuota describes the current quota for the subscription.
type SubscriptionQuota struct {
	// SubscriptionQuotaProperties - READ-ONLY; Describes the properties of the quota.
	*SubscriptionQuotaProperties `json:"properties,omitempty"`
	// ID - READ-ONLY; Resource Id
	ID *string `json:"id,omitempty"`
	// Name - Resource name
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; Resource type
	Type *string `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for SubscriptionQuota.
// Only the writable Name field is serialized; everything else is READ-ONLY.
func (sq SubscriptionQuota) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if sq.Name != nil {
		objectMap["name"] = sq.Name
	}
	return json.Marshal(objectMap)
}

// UnmarshalJSON is the custom unmarshaler for SubscriptionQuota struct.
// UnmarshalJSON is the custom unmarshaler for SubscriptionQuota struct.
// Needed because the flattened SubscriptionQuotaProperties is embedded under
// the "properties" JSON key.
func (sq *SubscriptionQuota) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var subscriptionQuotaProperties SubscriptionQuotaProperties
				err = json.Unmarshal(*v, &subscriptionQuotaProperties)
				if err != nil {
					return err
				}
				sq.SubscriptionQuotaProperties = &subscriptionQuotaProperties
			}
		case "id":
			if v != nil {
				var ID string
				err = json.Unmarshal(*v, &ID)
				if err != nil {
					return err
				}
				sq.ID = &ID
			}
		case "name":
			if v != nil {
				var name string
				err = json.Unmarshal(*v, &name)
				if err != nil {
					return err
				}
				sq.Name = &name
			}
		case "type":
			if v != nil {
				var typeVar string
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				sq.Type = &typeVar
			}
		}
	}

	return nil
}

// SubscriptionQuotaProperties describes the properties of the quota.
type SubscriptionQuotaProperties struct {
	// MaxCount - READ-ONLY; The max permitted usage of this resource.
	MaxCount *int32 `json:"maxCount,omitempty"`
	// CurrentCount - READ-ONLY; The current usage of this resource.
	CurrentCount *int32 `json:"currentCount,omitempty"`
}

// SubscriptionQuotasListResult result of the GetQuotas operation. It contains a list of quotas for the
// subscription in a particular region.
type SubscriptionQuotasListResult struct {
	autorest.Response `json:"-"`
	// Value - READ-ONLY; List of quotas for the subscription in a particular region.
	Value *[]SubscriptionQuota `json:"value,omitempty"`
}

// TrackedResource the resource model definition for a ARM tracked top level resource
type TrackedResource struct {
	// Tags - Resource tags.
	Tags map[string]*string `json:"tags"`
	// Location - The geo-location where the resource lives
	Location *string `json:"location,omitempty"`
	// ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
	ID *string `json:"id,omitempty"`
	// Name - READ-ONLY; The name of the resource
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
	Type *string `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for TrackedResource.
// Only the writable Tags and Location fields are serialized.
func (tr TrackedResource) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if tr.Tags != nil {
		objectMap["tags"] = tr.Tags
	}
	if tr.Location != nil {
		objectMap["location"] = tr.Location
	}
	return json.Marshal(objectMap)
}

// Transformation a transformation object, containing all information associated with the named transformation.
// All transformations are contained under a streaming job.
type Transformation struct {
	autorest.Response `json:"-"`
	// TransformationProperties - The properties that are associated with a transformation. Required on PUT (CreateOrReplace) requests.
	*TransformationProperties `json:"properties,omitempty"`
	// ID - READ-ONLY; Resource Id
	ID *string `json:"id,omitempty"`
	// Name - Resource name
	Name *string `json:"name,omitempty"`
	// Type - READ-ONLY; Resource type
	Type *string `json:"type,omitempty"`
}

// MarshalJSON is the custom marshaler for Transformation.
// Serializes the flattened properties and the writable Name; ID and Type are READ-ONLY.
func (t Transformation) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if t.TransformationProperties != nil {
		objectMap["properties"] = t.TransformationProperties
	}
	if t.Name != nil {
		objectMap["name"] = t.Name
	}
	return json.Marshal(objectMap)
}

// UnmarshalJSON is the custom unmarshaler for Transformation struct.
// UnmarshalJSON is the custom unmarshaler for Transformation struct.
// Needed because the flattened TransformationProperties is embedded under the
// "properties" JSON key.
func (t *Transformation) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		case "properties":
			if v != nil {
				var transformationProperties TransformationProperties
				err = json.Unmarshal(*v, &transformationProperties)
				if err != nil {
					return err
				}
				t.TransformationProperties = &transformationProperties
			}
		case "id":
			if v != nil {
				var ID string
				err = json.Unmarshal(*v, &ID)
				if err != nil {
					return err
				}
				t.ID = &ID
			}
		case "name":
			if v != nil {
				var name string
				err = json.Unmarshal(*v, &name)
				if err != nil {
					return err
				}
				t.Name = &name
			}
		case "type":
			if v != nil {
				var typeVar string
				err = json.Unmarshal(*v, &typeVar)
				if err != nil {
					return err
				}
				t.Type = &typeVar
			}
		}
	}

	return nil
}

// TransformationProperties the properties that are associated with a transformation.
type TransformationProperties struct {
	// StreamingUnits - Specifies the number of streaming units that the streaming job uses.
	StreamingUnits *int32 `json:"streamingUnits,omitempty"`
	// Query - Specifies the query that will be run in the streaming job. You can learn more about the Stream Analytics Query Language (SAQL) here: https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT (CreateOrReplace) requests.
	Query *string `json:"query,omitempty"`
	// Etag - READ-ONLY; The current entity tag for the transformation. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency.
	Etag *string `json:"etag,omitempty"`
}

// MarshalJSON is the custom marshaler for TransformationProperties.
// MarshalJSON is the custom marshaler for TransformationProperties.
// The READ-ONLY Etag is intentionally omitted from the payload.
func (tp TransformationProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	if tp.StreamingUnits != nil {
		objectMap["streamingUnits"] = tp.StreamingUnits
	}
	if tp.Query != nil {
		objectMap["query"] = tp.Query
	}
	return json.Marshal(objectMap)
}
diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/operations.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/operations.go
new file mode 100644
index 000000000000..947e5acf96ac
--- /dev/null
+++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/operations.go
@@ -0,0 +1,149 @@
package streamanalytics

// Copyright (c) Microsoft and contributors.  All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
	"context"
	"github.com/Azure/go-autorest/autorest"
	"github.com/Azure/go-autorest/autorest/azure"
	"github.com/Azure/go-autorest/tracing"
	"net/http"
)

// OperationsClient is the stream Analytics Client
type OperationsClient struct {
	BaseClient
}

// NewOperationsClient creates an instance of the OperationsClient client.
+func NewOperationsClient(subscriptionID string) OperationsClient { + return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewOperationsClientWithBaseURI creates an instance of the OperationsClient client using a custom endpoint. Use this +// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient { + return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// List lists all of the available Stream Analytics related operations. +func (client OperationsClient) List(ctx context.Context) (result OperationListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List") + defer func() { + sc := -1 + if result.olr.Response.Response != nil { + sc = result.olr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listNextResults + req, err := client.ListPreparer(ctx) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "List", nil, "Failure preparing request") + return + } + + resp, err := client.ListSender(req) + if err != nil { + result.olr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "List", resp, "Failure sending request") + return + } + + result.olr, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "List", resp, "Failure responding to request") + } + if result.olr.hasNextLink() && result.olr.IsEmpty() { + err = result.NextWithContext(ctx) + } + + return +} + +// ListPreparer prepares the List request. 
+func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) { + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPath("/providers/Microsoft.StreamAnalytics/operations"), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSender sends the List request. The method will close the +// http.Response Body if it receives an error. +func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) { + return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) +} + +// ListResponder handles the response to the List request. The method always +// closes the http.Response Body. +func (client OperationsClient) ListResponder(resp *http.Response) (result OperationListResult, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listNextResults retrieves the next set of results, if any. 
+func (client OperationsClient) listNextResults(ctx context.Context, lastResults OperationListResult) (result OperationListResult, err error) { + req, err := lastResults.operationListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "listNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "listNextResults", resp, "Failure sending next results request") + } + result, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "listNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListComplete enumerates all values, automatically crossing page boundaries as required. +func (client OperationsClient) ListComplete(ctx context.Context) (result OperationListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.List(ctx) + return +} diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/outputs.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/outputs.go new file mode 100644 index 000000000000..b86177ca7001 --- /dev/null +++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/outputs.go @@ -0,0 +1,647 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// OutputsClient is the stream Analytics Client +type OutputsClient struct { + BaseClient +} + +// NewOutputsClient creates an instance of the OutputsClient client. +func NewOutputsClient(subscriptionID string) OutputsClient { + return NewOutputsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewOutputsClientWithBaseURI creates an instance of the OutputsClient client using a custom endpoint. Use this when +// interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewOutputsClientWithBaseURI(baseURI string, subscriptionID string) OutputsClient { + return OutputsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrReplace creates an output or replaces an already existing output under an existing streaming job. +// Parameters: +// output - the definition of the output that will be used to create a new output or replace the existing one +// under the streaming job. +// resourceGroupName - the name of the resource group. The name is case insensitive. 
+// jobName - the name of the streaming job. +// outputName - the name of the output. +// ifMatch - the ETag of the output. Omit this value to always overwrite the current output. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new output to be created, but to prevent updating an existing output. +// Other values will result in a 412 Pre-condition Failed response. +func (client OutputsClient) CreateOrReplace(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string, ifNoneMatch string) (result Output, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.CreateOrReplace") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.OutputsClient", "CreateOrReplace", err.Error()) + } + + req, err := client.CreateOrReplacePreparer(ctx, output, resourceGroupName, jobName, outputName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + resp, err := client.CreateOrReplaceSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = 
autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "CreateOrReplace", resp, "Failure sending request") + return + } + + result, err = client.CreateOrReplaceResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "CreateOrReplace", resp, "Failure responding to request") + } + + return +} + +// CreateOrReplacePreparer prepares the CreateOrReplace request. +func (client OutputsClient) CreateOrReplacePreparer(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), + autorest.WithJSON(output), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. 
+func (client OutputsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. +func (client OutputsClient) CreateOrReplaceResponder(resp *http.Response) (result Output, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes an output from the streaming job. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// outputName - the name of the output. +func (client OutputsClient) Delete(ctx context.Context, resourceGroupName string, jobName string, outputName string) (result autorest.Response, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Delete") + defer func() { + sc := -1 + if result.Response != nil { + sc = result.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.OutputsClient", "Delete", err.Error()) + } + + req, err := client.DeletePreparer(ctx, 
resourceGroupName, jobName, outputName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client OutputsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string, outputName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) DeleteSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. 
The method always +// closes the http.Response Body. +func (client OutputsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets details about the specified output. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// outputName - the name of the output. +func (client OutputsClient) Get(ctx context.Context, resourceGroupName string, jobName string, outputName string) (result Output, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.OutputsClient", "Get", err.Error()) + } + + req, err := client.GetPreparer(ctx, resourceGroupName, jobName, outputName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, 
"streamanalytics.OutputsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client OutputsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, outputName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (client OutputsClient) GetResponder(resp *http.Response) (result Output, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByStreamingJob lists all of the outputs under the specified streaming job. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// selectParameter - the $select OData query parameter. This is a comma-separated list of structural properties +// to include in the response, or "*" to include all properties. By default, all properties are returned except +// diagnostics. Currently only accepts '*' as a valid value. +func (client OutputsClient) ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result OutputListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.olr.Response.Response != nil { + sc = result.olr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.OutputsClient", "ListByStreamingJob", err.Error()) + } + + result.fn = 
client.listByStreamingJobNextResults + req, err := client.ListByStreamingJobPreparer(ctx, resourceGroupName, jobName, selectParameter) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.olr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "ListByStreamingJob", resp, "Failure sending request") + return + } + + result.olr, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "ListByStreamingJob", resp, "Failure responding to request") + } + if result.olr.hasNextLink() && result.olr.IsEmpty() { + err = result.NextWithContext(ctx) + } + + return +} + +// ListByStreamingJobPreparer prepares the ListByStreamingJob request. +func (client OutputsClient) ListByStreamingJobPreparer(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(selectParameter) > 0 { + queryParameters["$select"] = autorest.Encode("query", selectParameter) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + 
+// ListByStreamingJobSender sends the ListByStreamingJob request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) ListByStreamingJobSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByStreamingJobResponder handles the response to the ListByStreamingJob request. The method always +// closes the http.Response Body. +func (client OutputsClient) ListByStreamingJobResponder(resp *http.Response) (result OutputListResult, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByStreamingJobNextResults retrieves the next set of results, if any. +func (client OutputsClient) listByStreamingJobNextResults(ctx context.Context, lastResults OutputListResult) (result OutputListResult, err error) { + req, err := lastResults.outputListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "listByStreamingJobNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByStreamingJobSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "listByStreamingJobNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByStreamingJobResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "listByStreamingJobNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByStreamingJobComplete enumerates all values, automatically crossing page boundaries as required. 
+func (client OutputsClient) ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result OutputListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.ListByStreamingJob") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByStreamingJob(ctx, resourceGroupName, jobName, selectParameter) + return +} + +// Test tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics service. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// outputName - the name of the output. +// output - if the output specified does not already exist, this parameter must contain the full output +// definition intended to be tested. If the output specified already exists, this parameter can be left null to +// test the existing output as is or if specified, the properties specified will overwrite the corresponding +// properties in the existing output (exactly like a PATCH operation) and the resulting output will be tested. 
+func (client OutputsClient) Test(ctx context.Context, resourceGroupName string, jobName string, outputName string, output *Output) (result OutputsTestFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Test") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.OutputsClient", "Test", err.Error()) + } + + req, err := client.TestPreparer(ctx, resourceGroupName, jobName, outputName, output) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Test", nil, "Failure preparing request") + return + } + + result, err = client.TestSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Test", result.Response(), "Failure sending request") + return + } + + return +} + +// TestPreparer prepares the Test request. 
+func (client OutputsClient) TestPreparer(ctx context.Context, resourceGroupName string, jobName string, outputName string, output *Output) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if output != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(output)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// TestSender sends the Test request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) TestSender(req *http.Request) (future OutputsTestFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// TestResponder handles the response to the Test request. The method always +// closes the http.Response Body. 
+func (client OutputsClient) TestResponder(resp *http.Response) (result ResourceTestStatus, err error) {
+	err = autorest.Respond(
+		resp,
+		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
+		autorest.ByUnmarshallingJSON(&result),
+		autorest.ByClosing())
+	result.Response = autorest.Response{Response: resp}
+	return
+}
+
+// Update updates an existing output under an existing streaming job. This can be used to partially update (ie. update
+// one or two properties) an output without affecting the rest of the job or output definition.
+// Parameters:
+// output - an Output object. The properties specified here will overwrite the corresponding properties in the
+// existing output (ie. Those properties will be updated). Any properties that are set to null here will mean
+// that the corresponding property in the existing output will remain the same and not change as a result of
+// this PATCH operation.
+// resourceGroupName - the name of the resource group. The name is case insensitive.
+// jobName - the name of the streaming job.
+// outputName - the name of the output.
+// ifMatch - the ETag of the output. Omit this value to always overwrite the current output. Specify the
+// last-seen ETag value to prevent accidentally overwriting concurrent changes.
+func (client OutputsClient) Update(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string) (result Output, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.OutputsClient", "Update", err.Error()) + } + + req, err := client.UpdatePreparer(ctx, output, resourceGroupName, jobName, outputName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
+func (client OutputsClient) UpdatePreparer(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "outputName": autorest.Encode("path", outputName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), + autorest.WithJSON(output), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client OutputsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
func (client OutputsClient) UpdateResponder(resp *http.Response) (result Output, err error) {
	// 200 OK is the only expected success status; the body is unmarshalled into
	// the Output result and the body is always closed.
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/privateendpoints.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/privateendpoints.go
new file mode 100644
index 000000000000..365c8edb0207
--- /dev/null
+++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/privateendpoints.go
@@ -0,0 +1,447 @@
package streamanalytics

// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
	"context"
	"github.com/Azure/go-autorest/autorest"
	"github.com/Azure/go-autorest/autorest/azure"
	"github.com/Azure/go-autorest/autorest/validation"
	"github.com/Azure/go-autorest/tracing"
	"net/http"
)

// PrivateEndpointsClient is the stream Analytics Client
type PrivateEndpointsClient struct {
	// Embeds BaseClient, inheriting BaseURI, SubscriptionID and the autorest sender.
	BaseClient
}

// NewPrivateEndpointsClient creates an instance of the PrivateEndpointsClient client.
+func NewPrivateEndpointsClient(subscriptionID string) PrivateEndpointsClient { + return NewPrivateEndpointsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewPrivateEndpointsClientWithBaseURI creates an instance of the PrivateEndpointsClient client using a custom +// endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure +// stack). +func NewPrivateEndpointsClientWithBaseURI(baseURI string, subscriptionID string) PrivateEndpointsClient { + return PrivateEndpointsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrUpdate creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. +// Parameters: +// privateEndpoint - the definition of the private endpoint that will be used to create a new cluster or +// replace the existing one. +// resourceGroupName - the name of the resource group. The name is case insensitive. +// clusterName - the name of the cluster. +// privateEndpointName - the name of the private endpoint. +// ifMatch - the ETag of the resource. Omit this value to always overwrite the current record set. Specify the +// last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new resource to be created, but to prevent updating an existing record +// set. Other values will result in a 412 Pre-condition Failed response. 
func (client PrivateEndpointsClient) CreateOrUpdate(ctx context.Context, privateEndpoint PrivateEndpoint, resourceGroupName string, clusterName string, privateEndpointName string, ifMatch string, ifNoneMatch string) (result PrivateEndpoint, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.CreateOrUpdate")
		defer func() {
			// Record the final HTTP status code on the span; -1 means no response was received.
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Client-side validation of ARM naming constraints before any network call.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: client.SubscriptionID,
			Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}},
		{TargetValue: resourceGroupName,
			Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
				{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
				{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil {
		return result, validation.NewError("streamanalytics.PrivateEndpointsClient", "CreateOrUpdate", err.Error())
	}

	req, err := client.CreateOrUpdatePreparer(ctx, privateEndpoint, resourceGroupName, clusterName, privateEndpointName, ifMatch, ifNoneMatch)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "CreateOrUpdate", nil, "Failure preparing request")
		return
	}

	resp, err := client.CreateOrUpdateSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "CreateOrUpdate", resp, "Failure sending request")
		return
	}

	result, err = client.CreateOrUpdateResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "CreateOrUpdate", resp, "Failure responding to request")
	}

	return
}

// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client PrivateEndpointsClient) CreateOrUpdatePreparer(ctx context.Context, privateEndpoint PrivateEndpoint, resourceGroupName string, clusterName string, privateEndpointName string, ifMatch string, ifNoneMatch string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"clusterName":         autorest.Encode("path", clusterName),
		"privateEndpointName": autorest.Encode("path", privateEndpointName),
		"resourceGroupName":   autorest.Encode("path", resourceGroupName),
		"subscriptionId":      autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2020-03-01-preview"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	// Clear Etag so it is not serialized into the request body; concurrency
	// control is driven by the If-Match / If-None-Match headers below.
	privateEndpoint.Etag = nil
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPut(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}", pathParameters),
		autorest.WithJSON(privateEndpoint),
		autorest.WithQueryParameters(queryParameters))
	// Conditional headers are attached only when the caller supplied them.
	if len(ifMatch) > 0 {
		preparer = autorest.DecoratePreparer(preparer,
			autorest.WithHeader("If-Match", autorest.String(ifMatch)))
	}
	if len(ifNoneMatch) > 0 {
		preparer = autorest.DecoratePreparer(preparer,
			autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch)))
	}
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client PrivateEndpointsClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}

// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client PrivateEndpointsClient) CreateOrUpdateResponder(resp *http.Response) (result PrivateEndpoint, err error) {
	// Both 200 (replaced) and 201 (created) are success statuses for PUT.
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// Delete delete the specified private endpoint.
// Parameters:
// resourceGroupName - the name of the resource group. The name is case insensitive.
// clusterName - the name of the cluster.
// privateEndpointName - the name of the private endpoint.
func (client PrivateEndpointsClient) Delete(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (result PrivateEndpointsDeleteFuture, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.Delete")
		defer func() {
			sc := -1
			if result.Response() != nil {
				sc = result.Response().StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	if err := validation.Validate([]validation.Validation{
		{TargetValue: client.SubscriptionID,
			Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}},
		{TargetValue: resourceGroupName,
			Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
				{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
				{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil {
		return result, validation.NewError("streamanalytics.PrivateEndpointsClient", "Delete", err.Error())
	}

	req, err := client.DeletePreparer(ctx, resourceGroupName, clusterName, privateEndpointName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Delete", nil, "Failure preparing request")
		return
	}

	// Delete is long-running: the sender returns a future the caller waits on.
	result, err = client.DeleteSender(req)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Delete", result.Response(), "Failure sending request")
		return
	}

	return
}

// DeletePreparer prepares the Delete request.
func (client PrivateEndpointsClient) DeletePreparer(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"clusterName":         autorest.Encode("path", clusterName),
		"privateEndpointName": autorest.Encode("path", privateEndpointName),
		"resourceGroupName":   autorest.Encode("path", resourceGroupName),
		"subscriptionId":      autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2020-03-01-preview"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsDelete(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client PrivateEndpointsClient) DeleteSender(req *http.Request) (future PrivateEndpointsDeleteFuture, err error) {
	var resp *http.Response
	resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
	if err != nil {
		return
	}
	// Wrap the initial response in a future that tracks the long-running operation.
	future.Future, err = azure.NewFutureFromResponse(resp)
	return
}

// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client PrivateEndpointsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
	// 200, 202 (accepted, still deleting) and 204 (already gone) are all success.
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
		autorest.ByClosing())
	result.Response = resp
	return
}

// Get gets information about the specified Private Endpoint.
// Parameters:
// resourceGroupName - the name of the resource group. The name is case insensitive.
// clusterName - the name of the cluster.
// privateEndpointName - the name of the private endpoint.
func (client PrivateEndpointsClient) Get(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (result PrivateEndpoint, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.Get")
		defer func() {
			// Record the final HTTP status code on the span; -1 means no response was received.
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Client-side validation of ARM naming constraints before any network call.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: client.SubscriptionID,
			Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}},
		{TargetValue: resourceGroupName,
			Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
				{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
				{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil {
		return result, validation.NewError("streamanalytics.PrivateEndpointsClient", "Get", err.Error())
	}

	req, err := client.GetPreparer(ctx, resourceGroupName, clusterName, privateEndpointName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Get", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Get", resp, "Failure sending request")
		return
	}

	result, err = client.GetResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Get", resp, "Failure responding to request")
	}

	return
}

// GetPreparer prepares the Get request.
func (client PrivateEndpointsClient) GetPreparer(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"clusterName":         autorest.Encode("path", clusterName),
		"privateEndpointName": autorest.Encode("path", privateEndpointName),
		"resourceGroupName":   autorest.Encode("path", resourceGroupName),
		"subscriptionId":      autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2020-03-01-preview"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client PrivateEndpointsClient) GetSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}

// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client PrivateEndpointsClient) GetResponder(resp *http.Response) (result PrivateEndpoint, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// ListByCluster lists the private endpoints in the cluster.
// Parameters:
// resourceGroupName - the name of the resource group. The name is case insensitive.
// clusterName - the name of the cluster.
func (client PrivateEndpointsClient) ListByCluster(ctx context.Context, resourceGroupName string, clusterName string) (result PrivateEndpointListResultPage, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.ListByCluster")
		defer func() {
			sc := -1
			if result.pelr.Response.Response != nil {
				sc = result.pelr.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	if err := validation.Validate([]validation.Validation{
		{TargetValue: client.SubscriptionID,
			Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}},
		{TargetValue: resourceGroupName,
			Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
				{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
				{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil {
		return result, validation.NewError("streamanalytics.PrivateEndpointsClient", "ListByCluster", err.Error())
	}

	// Wire up the paging callback used by the returned page to fetch subsequent pages.
	result.fn = client.listByClusterNextResults
	req, err := client.ListByClusterPreparer(ctx, resourceGroupName, clusterName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "ListByCluster", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListByClusterSender(req)
	if err != nil {
		result.pelr.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "ListByCluster", resp, "Failure sending request")
		return
	}

	result.pelr, err = client.ListByClusterResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "ListByCluster", resp, "Failure responding to request")
	}
	// Skip over an empty first page that still advertises a next link.
	if result.pelr.hasNextLink() && result.pelr.IsEmpty() {
		err = result.NextWithContext(ctx)
	}

	return
}

// ListByClusterPreparer prepares the ListByCluster request.
func (client PrivateEndpointsClient) ListByClusterPreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"clusterName":       autorest.Encode("path", clusterName),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2020-03-01-preview"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// ListByClusterSender sends the ListByCluster request. The method will close the
// http.Response Body if it receives an error.
func (client PrivateEndpointsClient) ListByClusterSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}

// ListByClusterResponder handles the response to the ListByCluster request. The method always
// closes the http.Response Body.
func (client PrivateEndpointsClient) ListByClusterResponder(resp *http.Response) (result PrivateEndpointListResult, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// listByClusterNextResults retrieves the next set of results, if any.
func (client PrivateEndpointsClient) listByClusterNextResults(ctx context.Context, lastResults PrivateEndpointListResult) (result PrivateEndpointListResult, err error) {
	req, err := lastResults.privateEndpointListResultPreparer(ctx)
	if err != nil {
		return result, autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "listByClusterNextResults", nil, "Failure preparing next results request")
	}
	// A nil request means the previous page carried no next link: nothing more to fetch.
	if req == nil {
		return
	}
	resp, err := client.ListByClusterSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "listByClusterNextResults", resp, "Failure sending next results request")
	}
	result, err = client.ListByClusterResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "listByClusterNextResults", resp, "Failure responding to next results request")
	}
	return
}

// ListByClusterComplete enumerates all values, automatically crossing page boundaries as required.
func (client PrivateEndpointsClient) ListByClusterComplete(ctx context.Context, resourceGroupName string, clusterName string) (result PrivateEndpointListResultIterator, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.ListByCluster")
		defer func() {
			sc := -1
			if result.Response().Response.Response != nil {
				sc = result.page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// The iterator wraps the first page; subsequent pages are fetched lazily.
	result.page, err = client.ListByCluster(ctx, resourceGroupName, clusterName)
	return
}
diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/streamanalyticsapi/interfaces.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/streamanalyticsapi/interfaces.go
new file mode 100644
index 000000000000..836b9de79df1
--- /dev/null
+++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/streamanalyticsapi/interfaces.go
@@ -0,0 +1,131 @@
package streamanalyticsapi

// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
	"context"
	"github.com/Azure/azure-sdk-for-go/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics"
	"github.com/Azure/go-autorest/autorest"
)

// FunctionsClientAPI contains the set of methods on the FunctionsClient type.
type FunctionsClientAPI interface {
	CreateOrReplace(ctx context.Context, function streamanalytics.Function, resourceGroupName string, jobName string, functionName string, ifMatch string, ifNoneMatch string) (result streamanalytics.Function, err error)
	Delete(ctx context.Context, resourceGroupName string, jobName string, functionName string) (result autorest.Response, err error)
	Get(ctx context.Context, resourceGroupName string, jobName string, functionName string) (result streamanalytics.Function, err error)
	ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result streamanalytics.FunctionListResultPage, err error)
	ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result streamanalytics.FunctionListResultIterator, err error)
	RetrieveDefaultDefinition(ctx context.Context, resourceGroupName string, jobName string, functionName string, functionRetrieveDefaultDefinitionParameters *streamanalytics.BasicFunctionRetrieveDefaultDefinitionParameters) (result streamanalytics.Function, err error)
	Test(ctx context.Context, resourceGroupName string, jobName string, functionName string, function *streamanalytics.Function) (result streamanalytics.FunctionsTestFuture, err error)
	Update(ctx context.Context, function streamanalytics.Function, resourceGroupName string, jobName string, functionName string, ifMatch string) (result streamanalytics.Function, err error)
}

// Compile-time check that FunctionsClient satisfies FunctionsClientAPI.
var _ FunctionsClientAPI = (*streamanalytics.FunctionsClient)(nil)

// InputsClientAPI contains the set of methods on the InputsClient type.
type InputsClientAPI interface {
	CreateOrReplace(ctx context.Context, input streamanalytics.Input, resourceGroupName string, jobName string, inputName string, ifMatch string, ifNoneMatch string) (result streamanalytics.Input, err error)
	Delete(ctx context.Context, resourceGroupName string, jobName string, inputName string) (result autorest.Response, err error)
	Get(ctx context.Context, resourceGroupName string, jobName string, inputName string) (result streamanalytics.Input, err error)
	ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result streamanalytics.InputListResultPage, err error)
	ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result streamanalytics.InputListResultIterator, err error)
	Test(ctx context.Context, resourceGroupName string, jobName string, inputName string, input *streamanalytics.Input) (result streamanalytics.InputsTestFuture, err error)
	Update(ctx context.Context, input streamanalytics.Input, resourceGroupName string, jobName string, inputName string, ifMatch string) (result streamanalytics.Input, err error)
}

// Compile-time check that InputsClient satisfies InputsClientAPI.
var _ InputsClientAPI = (*streamanalytics.InputsClient)(nil)

// OutputsClientAPI contains the set of methods on the OutputsClient type.
type OutputsClientAPI interface {
	CreateOrReplace(ctx context.Context, output streamanalytics.Output, resourceGroupName string, jobName string, outputName string, ifMatch string, ifNoneMatch string) (result streamanalytics.Output, err error)
	Delete(ctx context.Context, resourceGroupName string, jobName string, outputName string) (result autorest.Response, err error)
	Get(ctx context.Context, resourceGroupName string, jobName string, outputName string) (result streamanalytics.Output, err error)
	ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result streamanalytics.OutputListResultPage, err error)
	ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result streamanalytics.OutputListResultIterator, err error)
	Test(ctx context.Context, resourceGroupName string, jobName string, outputName string, output *streamanalytics.Output) (result streamanalytics.OutputsTestFuture, err error)
	Update(ctx context.Context, output streamanalytics.Output, resourceGroupName string, jobName string, outputName string, ifMatch string) (result streamanalytics.Output, err error)
}

// Compile-time check that OutputsClient satisfies OutputsClientAPI.
var _ OutputsClientAPI = (*streamanalytics.OutputsClient)(nil)

// StreamingJobsClientAPI contains the set of methods on the StreamingJobsClient type.
type StreamingJobsClientAPI interface {
	CreateOrReplace(ctx context.Context, streamingJob streamanalytics.StreamingJob, resourceGroupName string, jobName string, ifMatch string, ifNoneMatch string) (result streamanalytics.StreamingJobsCreateOrReplaceFuture, err error)
	Delete(ctx context.Context, resourceGroupName string, jobName string) (result streamanalytics.StreamingJobsDeleteFuture, err error)
	Get(ctx context.Context, resourceGroupName string, jobName string, expand string) (result streamanalytics.StreamingJob, err error)
	List(ctx context.Context, expand string) (result streamanalytics.StreamingJobListResultPage, err error)
	ListComplete(ctx context.Context, expand string) (result streamanalytics.StreamingJobListResultIterator, err error)
	ListByResourceGroup(ctx context.Context, resourceGroupName string, expand string) (result streamanalytics.StreamingJobListResultPage, err error)
	ListByResourceGroupComplete(ctx context.Context, resourceGroupName string, expand string) (result streamanalytics.StreamingJobListResultIterator, err error)
	Start(ctx context.Context, resourceGroupName string, jobName string, startJobParameters *streamanalytics.StartStreamingJobParameters) (result streamanalytics.StreamingJobsStartFuture, err error)
	Stop(ctx context.Context, resourceGroupName string, jobName string) (result streamanalytics.StreamingJobsStopFuture, err error)
	Update(ctx context.Context, streamingJob streamanalytics.StreamingJob, resourceGroupName string, jobName string, ifMatch string) (result streamanalytics.StreamingJob, err error)
}

// Compile-time check that StreamingJobsClient satisfies StreamingJobsClientAPI.
var _ StreamingJobsClientAPI = (*streamanalytics.StreamingJobsClient)(nil)

// SubscriptionsClientAPI contains the set of methods on the SubscriptionsClient type.
type SubscriptionsClientAPI interface {
	ListQuotas(ctx context.Context, location string) (result streamanalytics.SubscriptionQuotasListResult, err error)
}

// Compile-time check that SubscriptionsClient satisfies SubscriptionsClientAPI.
var _ SubscriptionsClientAPI = (*streamanalytics.SubscriptionsClient)(nil)

// TransformationsClientAPI contains the set of methods on the TransformationsClient type.
type TransformationsClientAPI interface {
	CreateOrReplace(ctx context.Context, transformation streamanalytics.Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string, ifNoneMatch string) (result streamanalytics.Transformation, err error)
	Get(ctx context.Context, resourceGroupName string, jobName string, transformationName string) (result streamanalytics.Transformation, err error)
	Update(ctx context.Context, transformation streamanalytics.Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string) (result streamanalytics.Transformation, err error)
}

// Compile-time check that TransformationsClient satisfies TransformationsClientAPI.
var _ TransformationsClientAPI = (*streamanalytics.TransformationsClient)(nil)

// OperationsClientAPI contains the set of methods on the OperationsClient type.
type OperationsClientAPI interface {
	List(ctx context.Context) (result streamanalytics.OperationListResultPage, err error)
	ListComplete(ctx context.Context) (result streamanalytics.OperationListResultIterator, err error)
}

// Compile-time check that OperationsClient satisfies OperationsClientAPI.
var _ OperationsClientAPI = (*streamanalytics.OperationsClient)(nil)

// ClustersClientAPI contains the set of methods on the ClustersClient type.
type ClustersClientAPI interface {
	CreateOrUpdate(ctx context.Context, cluster streamanalytics.Cluster, resourceGroupName string, clusterName string, ifMatch string, ifNoneMatch string) (result streamanalytics.ClustersCreateOrUpdateFuture, err error)
	Delete(ctx context.Context, resourceGroupName string, clusterName string) (result streamanalytics.ClustersDeleteFuture, err error)
	Get(ctx context.Context, resourceGroupName string, clusterName string) (result streamanalytics.Cluster, err error)
	ListByResourceGroup(ctx context.Context, resourceGroupName string) (result streamanalytics.ClusterListResultPage, err error)
	ListByResourceGroupComplete(ctx context.Context, resourceGroupName string) (result streamanalytics.ClusterListResultIterator, err error)
	ListBySubscription(ctx context.Context) (result streamanalytics.ClusterListResultPage, err error)
	ListBySubscriptionComplete(ctx context.Context) (result streamanalytics.ClusterListResultIterator, err error)
	ListStreamingJobs(ctx context.Context, resourceGroupName string, clusterName string) (result streamanalytics.ClusterJobListResultPage, err error)
	ListStreamingJobsComplete(ctx context.Context, resourceGroupName string, clusterName string) (result streamanalytics.ClusterJobListResultIterator, err error)
	Update(ctx context.Context, cluster streamanalytics.Cluster, resourceGroupName string, clusterName string, ifMatch string) (result streamanalytics.ClustersUpdateFuture, err error)
}

// Compile-time check that ClustersClient satisfies ClustersClientAPI.
var _ ClustersClientAPI = (*streamanalytics.ClustersClient)(nil)

// PrivateEndpointsClientAPI contains the set of methods on the PrivateEndpointsClient type.
type PrivateEndpointsClientAPI interface {
	CreateOrUpdate(ctx context.Context, privateEndpoint streamanalytics.PrivateEndpoint, resourceGroupName string, clusterName string, privateEndpointName string, ifMatch string, ifNoneMatch string) (result streamanalytics.PrivateEndpoint, err error)
	Delete(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (result streamanalytics.PrivateEndpointsDeleteFuture, err error)
	Get(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (result streamanalytics.PrivateEndpoint, err error)
	ListByCluster(ctx context.Context, resourceGroupName string, clusterName string) (result streamanalytics.PrivateEndpointListResultPage, err error)
	ListByClusterComplete(ctx context.Context, resourceGroupName string, clusterName string) (result streamanalytics.PrivateEndpointListResultIterator, err error)
}

// Compile-time check that PrivateEndpointsClient satisfies PrivateEndpointsClientAPI.
var _ PrivateEndpointsClientAPI = (*streamanalytics.PrivateEndpointsClient)(nil)
diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/streamingjobs.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/streamingjobs.go
new file mode 100644
index 000000000000..0d6b9fecaffa
--- /dev/null
+++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/streamingjobs.go
@@ -0,0 +1,849 @@
package streamanalytics

// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
	"context"
	"github.com/Azure/go-autorest/autorest"
	"github.com/Azure/go-autorest/autorest/azure"
	"github.com/Azure/go-autorest/autorest/validation"
	"github.com/Azure/go-autorest/tracing"
	"net/http"
)

// StreamingJobsClient is the Stream Analytics Client for streaming-job operations.
// It embeds BaseClient, which carries the base URI and subscription ID.
type StreamingJobsClient struct {
	BaseClient
}

// NewStreamingJobsClient creates an instance of the StreamingJobsClient client.
func NewStreamingJobsClient(subscriptionID string) StreamingJobsClient {
	return NewStreamingJobsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}

// NewStreamingJobsClientWithBaseURI creates an instance of the StreamingJobsClient client using a custom endpoint.
// Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewStreamingJobsClientWithBaseURI(baseURI string, subscriptionID string) StreamingJobsClient {
	return StreamingJobsClient{NewWithBaseURI(baseURI, subscriptionID)}
}

// CreateOrReplace creates a streaming job or replaces an already existing streaming job.
// Parameters:
// streamingJob - the definition of the streaming job that will be used to create a new streaming job or
// replace the existing one.
// resourceGroupName - the name of the resource group. The name is case insensitive.
// jobName - the name of the streaming job.
// ifMatch - the ETag of the streaming job. Omit this value to always overwrite the current record set. Specify
// the last-seen ETag value to prevent accidentally overwriting concurrent changes.
// ifNoneMatch - set to '*' to allow a new streaming job to be created, but to prevent updating an existing
// record set.
Other values will result in a 412 Pre-condition Failed response. +func (client StreamingJobsClient) CreateOrReplace(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string, ifNoneMatch string) (result StreamingJobsCreateOrReplaceFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.CreateOrReplace") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.StreamingJobsClient", "CreateOrReplace", err.Error()) + } + + req, err := client.CreateOrReplacePreparer(ctx, streamingJob, resourceGroupName, jobName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + result, err = client.CreateOrReplaceSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "CreateOrReplace", result.Response(), "Failure sending request") + return + } + + return +} + +// CreateOrReplacePreparer prepares the CreateOrReplace request. 
+func (client StreamingJobsClient) CreateOrReplacePreparer(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), + autorest.WithJSON(streamingJob), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) CreateOrReplaceSender(req *http.Request) (future StreamingJobsCreateOrReplaceFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. 
+func (client StreamingJobsClient) CreateOrReplaceResponder(resp *http.Response) (result StreamingJob, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deletes a streaming job. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +func (client StreamingJobsClient) Delete(ctx context.Context, resourceGroupName string, jobName string) (result StreamingJobsDeleteFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Delete") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.StreamingJobsClient", "Delete", err.Error()) + } + + req, err := client.DeletePreparer(ctx, resourceGroupName, jobName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Delete", result.Response(), 
"Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client StreamingJobsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) DeleteSender(req *http.Request) (future StreamingJobsDeleteFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client StreamingJobsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get gets details about the specified streaming job. +// Parameters: +// resourceGroupName - the name of the resource group. 
The name is case insensitive. +// jobName - the name of the streaming job. +// expand - the $expand OData query parameter. This is a comma-separated list of additional streaming job +// properties to include in the response, beyond the default set returned when this parameter is absent. The +// default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and +// 'functions'. +func (client StreamingJobsClient) Get(ctx context.Context, resourceGroupName string, jobName string, expand string) (result StreamingJob, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.StreamingJobsClient", "Get", err.Error()) + } + + req, err := client.GetPreparer(ctx, resourceGroupName, jobName, expand) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if 
err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client StreamingJobsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, expand string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(expand) > 0 { + queryParameters["$expand"] = autorest.Encode("query", expand) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client StreamingJobsClient) GetResponder(resp *http.Response) (result StreamingJob, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// List lists all of the streaming jobs in the given subscription. 
+// Parameters: +// expand - the $expand OData query parameter. This is a comma-separated list of additional streaming job +// properties to include in the response, beyond the default set returned when this parameter is absent. The +// default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and +// 'functions'. +func (client StreamingJobsClient) List(ctx context.Context, expand string) (result StreamingJobListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.List") + defer func() { + sc := -1 + if result.sjlr.Response.Response != nil { + sc = result.sjlr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.StreamingJobsClient", "List", err.Error()) + } + + result.fn = client.listNextResults + req, err := client.ListPreparer(ctx, expand) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "List", nil, "Failure preparing request") + return + } + + resp, err := client.ListSender(req) + if err != nil { + result.sjlr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "List", resp, "Failure sending request") + return + } + + result.sjlr, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "List", resp, "Failure responding to request") + } + if result.sjlr.hasNextLink() && result.sjlr.IsEmpty() { + err = result.NextWithContext(ctx) + } + + return +} + +// ListPreparer prepares the List request. 
+func (client StreamingJobsClient) ListPreparer(ctx context.Context, expand string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(expand) > 0 { + queryParameters["$expand"] = autorest.Encode("query", expand) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSender sends the List request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) ListSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListResponder handles the response to the List request. The method always +// closes the http.Response Body. +func (client StreamingJobsClient) ListResponder(resp *http.Response) (result StreamingJobListResult, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listNextResults retrieves the next set of results, if any. 
+func (client StreamingJobsClient) listNextResults(ctx context.Context, lastResults StreamingJobListResult) (result StreamingJobListResult, err error) { + req, err := lastResults.streamingJobListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listNextResults", resp, "Failure sending next results request") + } + result, err = client.ListResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListComplete enumerates all values, automatically crossing page boundaries as required. +func (client StreamingJobsClient) ListComplete(ctx context.Context, expand string) (result StreamingJobListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.List") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.List(ctx, expand) + return +} + +// ListByResourceGroup lists all of the streaming jobs in the specified resource group. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// expand - the $expand OData query parameter. This is a comma-separated list of additional streaming job +// properties to include in the response, beyond the default set returned when this parameter is absent. 
The +// default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and +// 'functions'. +func (client StreamingJobsClient) ListByResourceGroup(ctx context.Context, resourceGroupName string, expand string) (result StreamingJobListResultPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.ListByResourceGroup") + defer func() { + sc := -1 + if result.sjlr.Response.Response != nil { + sc = result.sjlr.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.StreamingJobsClient", "ListByResourceGroup", err.Error()) + } + + result.fn = client.listByResourceGroupNextResults + req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName, expand) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "ListByResourceGroup", nil, "Failure preparing request") + return + } + + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.sjlr.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "ListByResourceGroup", resp, "Failure sending request") + return + } + + result.sjlr, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", 
"ListByResourceGroup", resp, "Failure responding to request") + } + if result.sjlr.hasNextLink() && result.sjlr.IsEmpty() { + err = result.NextWithContext(ctx) + } + + return +} + +// ListByResourceGroupPreparer prepares the ListByResourceGroup request. +func (client StreamingJobsClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string, expand string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(expand) > 0 { + queryParameters["$expand"] = autorest.Encode("query", expand) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always +// closes the http.Response Body. 
+func (client StreamingJobsClient) ListByResourceGroupResponder(resp *http.Response) (result StreamingJobListResult, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByResourceGroupNextResults retrieves the next set of results, if any. +func (client StreamingJobsClient) listByResourceGroupNextResults(ctx context.Context, lastResults StreamingJobListResult) (result StreamingJobListResult, err error) { + req, err := lastResults.streamingJobListResultPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listByResourceGroupNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listByResourceGroupNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listByResourceGroupNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required. 
+func (client StreamingJobsClient) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string, expand string) (result StreamingJobListResultIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.ListByResourceGroup") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByResourceGroup(ctx, resourceGroupName, expand) + return +} + +// Start starts a streaming job. Once a job is started it will start processing input events and produce output. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// startJobParameters - parameters applicable to a start streaming job operation. +func (client StreamingJobsClient) Start(ctx context.Context, resourceGroupName string, jobName string, startJobParameters *StartStreamingJobParameters) (result StreamingJobsStartFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Start") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.StreamingJobsClient", "Start", 
err.Error()) + } + + req, err := client.StartPreparer(ctx, resourceGroupName, jobName, startJobParameters) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Start", nil, "Failure preparing request") + return + } + + result, err = client.StartSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Start", result.Response(), "Failure sending request") + return + } + + return +} + +// StartPreparer prepares the Start request. +func (client StreamingJobsClient) StartPreparer(ctx context.Context, resourceGroupName string, jobName string, startJobParameters *StartStreamingJobParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start", pathParameters), + autorest.WithQueryParameters(queryParameters)) + if startJobParameters != nil { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithJSON(startJobParameters)) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// StartSender sends the Start request. The method will close the +// http.Response Body if it receives an error. 
+func (client StreamingJobsClient) StartSender(req *http.Request) (future StreamingJobsStartFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// StartResponder handles the response to the Start request. The method always +// closes the http.Response Body. +func (client StreamingJobsClient) StartResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByClosing()) + result.Response = resp + return +} + +// Stop stops a running streaming job. This will cause a running streaming job to stop processing input events and +// producing output. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +func (client StreamingJobsClient) Stop(ctx context.Context, resourceGroupName string, jobName string) (result StreamingJobsStopFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Stop") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, 
validation.NewError("streamanalytics.StreamingJobsClient", "Stop", err.Error()) + } + + req, err := client.StopPreparer(ctx, resourceGroupName, jobName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Stop", nil, "Failure preparing request") + return + } + + result, err = client.StopSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Stop", result.Response(), "Failure sending request") + return + } + + return +} + +// StopPreparer prepares the Stop request. +func (client StreamingJobsClient) StopPreparer(ctx context.Context, resourceGroupName string, jobName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// StopSender sends the Stop request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) StopSender(req *http.Request) (future StreamingJobsStopFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// StopResponder handles the response to the Stop request. The method always +// closes the http.Response Body. 
+func (client StreamingJobsClient) StopResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByClosing()) + result.Response = resp + return +} + +// Update updates an existing streaming job. This can be used to partially update (ie. update one or two properties) a +// streaming job without affecting the rest the job definition. +// Parameters: +// streamingJob - a streaming job object. The properties specified here will overwrite the corresponding +// properties in the existing streaming job (ie. Those properties will be updated). Any properties that are set +// to null here will mean that the corresponding property in the existing input will remain the same and not +// change as a result of this PATCH operation. +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// ifMatch - the ETag of the streaming job. Omit this value to always overwrite the current record set. Specify +// the last-seen ETag value to prevent accidentally overwriting concurrent changes. 
+func (client StreamingJobsClient) Update(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string) (result StreamingJob, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.StreamingJobsClient", "Update", err.Error()) + } + + req, err := client.UpdatePreparer(ctx, streamingJob, resourceGroupName, jobName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
+func (client StreamingJobsClient) UpdatePreparer(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), + autorest.WithJSON(streamingJob), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client StreamingJobsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
+func (client StreamingJobsClient) UpdateResponder(resp *http.Response) (result StreamingJob, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/subscriptions.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/subscriptions.go new file mode 100644 index 000000000000..9f21ac29a477 --- /dev/null +++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/subscriptions.go @@ -0,0 +1,123 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// SubscriptionsClient is the stream Analytics Client +type SubscriptionsClient struct { + BaseClient +} + +// NewSubscriptionsClient creates an instance of the SubscriptionsClient client. 
+func NewSubscriptionsClient(subscriptionID string) SubscriptionsClient { + return NewSubscriptionsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewSubscriptionsClientWithBaseURI creates an instance of the SubscriptionsClient client using a custom endpoint. +// Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewSubscriptionsClientWithBaseURI(baseURI string, subscriptionID string) SubscriptionsClient { + return SubscriptionsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// ListQuotas retrieves the subscription's current quota information in a particular region. +// Parameters: +// location - the region in which to retrieve the subscription's quota information. You can find out which +// regions Azure Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/ +func (client SubscriptionsClient) ListQuotas(ctx context.Context, location string) (result SubscriptionQuotasListResult, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SubscriptionsClient.ListQuotas") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.SubscriptionsClient", "ListQuotas", err.Error()) + } + + req, err := client.ListQuotasPreparer(ctx, location) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", nil, "Failure preparing request") + return + } + + resp, err := client.ListQuotasSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, 
"streamanalytics.SubscriptionsClient", "ListQuotas", resp, "Failure sending request") + return + } + + result, err = client.ListQuotasResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", resp, "Failure responding to request") + } + + return +} + +// ListQuotasPreparer prepares the ListQuotas request. +func (client SubscriptionsClient) ListQuotasPreparer(ctx context.Context, location string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "location": autorest.Encode("path", location), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListQuotasSender sends the ListQuotas request. The method will close the +// http.Response Body if it receives an error. +func (client SubscriptionsClient) ListQuotasSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListQuotasResponder handles the response to the ListQuotas request. The method always +// closes the http.Response Body. 
+func (client SubscriptionsClient) ListQuotasResponder(resp *http.Response) (result SubscriptionQuotasListResult, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/transformations.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/transformations.go new file mode 100644 index 000000000000..06eb7a2fe7da --- /dev/null +++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/transformations.go @@ -0,0 +1,334 @@ +package streamanalytics + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// TransformationsClient is the stream Analytics Client +type TransformationsClient struct { + BaseClient +} + +// NewTransformationsClient creates an instance of the TransformationsClient client. 
+func NewTransformationsClient(subscriptionID string) TransformationsClient { + return NewTransformationsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewTransformationsClientWithBaseURI creates an instance of the TransformationsClient client using a custom endpoint. +// Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewTransformationsClientWithBaseURI(baseURI string, subscriptionID string) TransformationsClient { + return TransformationsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CreateOrReplace creates a transformation or replaces an already existing transformation under an existing streaming +// job. +// Parameters: +// transformation - the definition of the transformation that will be used to create a new transformation or +// replace the existing one under the streaming job. +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// transformationName - the name of the transformation. +// ifMatch - the ETag of the transformation. Omit this value to always overwrite the current transformation. +// Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. +// ifNoneMatch - set to '*' to allow a new transformation to be created, but to prevent updating an existing +// transformation. Other values will result in a 412 Pre-condition Failed response. 
+func (client TransformationsClient) CreateOrReplace(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string, ifNoneMatch string) (result Transformation, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TransformationsClient.CreateOrReplace") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.TransformationsClient", "CreateOrReplace", err.Error()) + } + + req, err := client.CreateOrReplacePreparer(ctx, transformation, resourceGroupName, jobName, transformationName, ifMatch, ifNoneMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + resp, err := client.CreateOrReplaceSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "CreateOrReplace", resp, "Failure sending request") + return + } + + result, err = client.CreateOrReplaceResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "CreateOrReplace", resp, "Failure responding to request") + } + + return +} + 
+// CreateOrReplacePreparer prepares the CreateOrReplace request. +func (client TransformationsClient) CreateOrReplacePreparer(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + "transformationName": autorest.Encode("path", transformationName), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}", pathParameters), + autorest.WithJSON(transformation), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + if len(ifNoneMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. +func (client TransformationsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. 
+func (client TransformationsClient) CreateOrReplaceResponder(resp *http.Response) (result Transformation, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Get gets details about the specified transformation. +// Parameters: +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// transformationName - the name of the transformation. +func (client TransformationsClient) Get(ctx context.Context, resourceGroupName string, jobName string, transformationName string) (result Transformation, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TransformationsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.TransformationsClient", "Get", err.Error()) + } + + req, err := client.GetPreparer(ctx, resourceGroupName, jobName, transformationName) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != 
nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client TransformationsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, transformationName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + "transformationName": autorest.Encode("path", transformationName), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client TransformationsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (client TransformationsClient) GetResponder(resp *http.Response) (result Transformation, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Update updates an existing transformation under an existing streaming job. This can be used to partially update (ie. +// update one or two properties) a transformation without affecting the rest the job or transformation definition. +// Parameters: +// transformation - a Transformation object. The properties specified here will overwrite the corresponding +// properties in the existing transformation (ie. Those properties will be updated). Any properties that are +// set to null here will mean that the corresponding property in the existing transformation will remain the +// same and not change as a result of this PATCH operation. +// resourceGroupName - the name of the resource group. The name is case insensitive. +// jobName - the name of the streaming job. +// transformationName - the name of the transformation. +// ifMatch - the ETag of the transformation. Omit this value to always overwrite the current transformation. +// Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. 
+func (client TransformationsClient) Update(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string) (result Transformation, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TransformationsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: client.SubscriptionID, + Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, + {TargetValue: resourceGroupName, + Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, + {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, + {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { + return result, validation.NewError("streamanalytics.TransformationsClient", "Update", err.Error()) + } + + req, err := client.UpdatePreparer(ctx, transformation, resourceGroupName, jobName, transformationName, ifMatch) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. 
+func (client TransformationsClient) UpdatePreparer(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "jobName": autorest.Encode("path", jobName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + "transformationName": autorest.Encode("path", transformationName), + } + + const APIVersion = "2017-04-01-preview" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}", pathParameters), + autorest.WithJSON(transformation), + autorest.WithQueryParameters(queryParameters)) + if len(ifMatch) > 0 { + preparer = autorest.DecoratePreparer(preparer, + autorest.WithHeader("If-Match", autorest.String(ifMatch))) + } + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client TransformationsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. 
+func (client TransformationsClient) UpdateResponder(resp *http.Response) (result Transformation, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/version.go b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/version.go new file mode 100644 index 000000000000..7dbcffbe3775 --- /dev/null +++ b/services/preview/streamanalytics/mgmt/2020-03-01-preview/streamanalytics/version.go @@ -0,0 +1,30 @@ +package streamanalytics + +import "github.com/Azure/azure-sdk-for-go/version" + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +// UserAgent returns the UserAgent string to use when sending http.Requests. +func UserAgent() string { + return "Azure-SDK-For-Go/" + Version() + " streamanalytics/2020-03-01-preview" +} + +// Version returns the semantic version (see http://semver.org) of the client. +func Version() string { + return version.Number +}